repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-cloud-java | 35,818 | java-securitycentermanagement/proto-google-cloud-securitycentermanagement-v1/src/main/java/com/google/cloud/securitycentermanagement/v1/ListSecurityHealthAnalyticsCustomModulesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycentermanagement/v1/security_center_management.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycentermanagement.v1;
/**
*
*
* <pre>
* Request message for
* [SecurityCenterManagement.ListSecurityHealthAnalyticsCustomModules][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListSecurityHealthAnalyticsCustomModules].
* </pre>
*
* Protobuf type {@code
* google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest}
*/
// NOTE(review): protoc-generated message class (file header says "DO NOT EDIT!").
// Any hand edits here will be lost on the next protobuf regeneration; fix the
// .proto or the generator instead.
public final class ListSecurityHealthAnalyticsCustomModulesRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest)
    ListSecurityHealthAnalyticsCustomModulesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListSecurityHealthAnalyticsCustomModulesRequest.newBuilder() to construct.
  private ListSecurityHealthAnalyticsCustomModulesRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; string fields start empty.
  private ListSecurityHealthAnalyticsCustomModulesRequest() {
    parent_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListSecurityHealthAnalyticsCustomModulesRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
        .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityHealthAnalyticsCustomModulesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
        .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityHealthAnalyticsCustomModulesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycentermanagement.v1
                .ListSecurityHealthAnalyticsCustomModulesRequest.class,
            com.google.cloud.securitycentermanagement.v1
                .ListSecurityHealthAnalyticsCustomModulesRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily converted (and cached) to
  // String on first getParent() call. volatile makes the cached swap safe to
  // publish across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. Name of the parent organization, folder, or project in which to
   * list custom modules, in one of the following formats:
   *
   * * `organizations/{organization}/locations/{location}`
   * * `folders/{folder}/locations/{location}`
   * * `projects/{project}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Name of the parent organization, folder, or project in which to
   * list custom modules, in one of the following formats:
   *
   * * `organizations/{organization}/locations/{location}`
   * * `folders/{folder}/locations/{location}`
   * * `projects/{project}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. The maximum number of results to return in a single response.
   * Default is 10, minimum is 1, maximum is 1000.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  // Same lazy String/ByteString caching scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. A pagination token returned from a previous request. Provide this
   * token to retrieve the next page of results.
   *
   * When paginating, the rest of the request must match the request that
   * generated the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A pagination token returned from a previous request. Provide this
   * token to retrieve the next page of results.
   *
   * When paginating, the rest of the request must match the request that
   * generated the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so initialization always succeeds
  // after the first (memoized) check.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only fields that differ from their proto3 defaults.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Size computation mirrors writeTo field-for-field; the result is memoized
  // in memoizedSize (sentinel -1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over all declared fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.securitycentermanagement.v1
            .ListSecurityHealthAnalyticsCustomModulesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest
        other =
            (com.google.cloud.securitycentermanagement.v1
                    .ListSecurityHealthAnalyticsCustomModulesRequest)
                obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash is memoized (memoizedHashCode), which is safe because the message is
  // immutable once built.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Standard protobuf parse entry points, delegating to PARSER. ---

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request message for
   * [SecurityCenterManagement.ListSecurityHealthAnalyticsCustomModules][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListSecurityHealthAnalyticsCustomModules].
   * </pre>
   *
   * Protobuf type {@code
   * google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest)
      com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
          .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityHealthAnalyticsCustomModulesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
          .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityHealthAnalyticsCustomModulesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycentermanagement.v1
                  .ListSecurityHealthAnalyticsCustomModulesRequest.class,
              com.google.cloud.securitycentermanagement.v1
                  .ListSecurityHealthAnalyticsCustomModulesRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its proto3 default and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
          .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityHealthAnalyticsCustomModulesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securitycentermanagement.v1
            .ListSecurityHealthAnalyticsCustomModulesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.securitycentermanagement.v1
            .ListSecurityHealthAnalyticsCustomModulesRequest
        build() {
      com.google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest
          result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securitycentermanagement.v1
            .ListSecurityHealthAnalyticsCustomModulesRequest
        buildPartial() {
      com.google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest
          result =
              new com.google.cloud.securitycentermanagement.v1
                  .ListSecurityHealthAnalyticsCustomModulesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bit is set in bitField0_ into the
    // result message.
    private void buildPartial0(
        com.google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof
          com.google.cloud.securitycentermanagement.v1
              .ListSecurityHealthAnalyticsCustomModulesRequest) {
        return mergeFrom(
            (com.google.cloud.securitycentermanagement.v1
                    .ListSecurityHealthAnalyticsCustomModulesRequest)
                other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: only non-default values from `other` overwrite this
    // builder's state.
    public Builder mergeFrom(
        com.google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest
            other) {
      if (other
          == com.google.cloud.securitycentermanagement.v1
              .ListSecurityHealthAnalyticsCustomModulesRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop; unknown tags are preserved via
    // parseUnknownField so round-tripping does not drop data.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // has-bits: 0x1 = parent, 0x2 = page_size, 0x4 = page_token.
    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. Name of the parent organization, folder, or project in which to
     * list custom modules, in one of the following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent organization, folder, or project in which to
     * list custom modules, in one of the following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent organization, folder, or project in which to
     * list custom modules, in one of the following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent organization, folder, or project in which to
     * list custom modules, in one of the following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent organization, folder, or project in which to
     * list custom modules, in one of the following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in a single response.
     * Default is 10, minimum is 1, maximum is 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in a single response.
     * Default is 10, minimum is 1, maximum is 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in a single response.
     * Default is 10, minimum is 1, maximum is 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.securitycentermanagement.v1.ListSecurityHealthAnalyticsCustomModulesRequest)
  private static final com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.securitycentermanagement.v1
            .ListSecurityHealthAnalyticsCustomModulesRequest();
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by all parseFrom overloads; returns a partial message on
  // failure via setUnfinishedMessage for error reporting.
  private static final com.google.protobuf.Parser<ListSecurityHealthAnalyticsCustomModulesRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<
              ListSecurityHealthAnalyticsCustomModulesRequest>() {
            @java.lang.Override
            public ListSecurityHealthAnalyticsCustomModulesRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };

  public static com.google.protobuf.Parser<ListSecurityHealthAnalyticsCustomModulesRequest>
      parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListSecurityHealthAnalyticsCustomModulesRequest>
      getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securitycentermanagement.v1
          .ListSecurityHealthAnalyticsCustomModulesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,784 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetIamPolicyInstantSnapshotRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for InstantSnapshots.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest}
*/
// NOTE: protoc-generated message class — do not hand-edit logic; regenerate from
// google/cloud/compute/v1/compute.proto instead. Comments below are explanatory only.
public final class GetIamPolicyInstantSnapshotRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest)
    GetIamPolicyInstantSnapshotRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GetIamPolicyInstantSnapshotRequest.newBuilder() to construct.
  private GetIamPolicyInstantSnapshotRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GetIamPolicyInstantSnapshotRequest() {
    project_ = "";
    resource_ = "";
    zone_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetIamPolicyInstantSnapshotRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_GetIamPolicyInstantSnapshotRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_GetIamPolicyInstantSnapshotRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest.class,
            com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest.Builder.class);
  }
  // Bit 0 tracks presence of the only explicitly-optional field,
  // options_requested_policy_version (proto3 optional -> has-bit).
  private int bitField0_;
  public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029;
  private int optionsRequestedPolicyVersion_ = 0;
  /**
   *
   *
   * <pre>
   * Requested IAM Policy version.
   * </pre>
   *
   * <code>optional int32 options_requested_policy_version = 499220029;</code>
   *
   * @return Whether the optionsRequestedPolicyVersion field is set.
   */
  @java.lang.Override
  public boolean hasOptionsRequestedPolicyVersion() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Requested IAM Policy version.
   * </pre>
   *
   * <code>optional int32 options_requested_policy_version = 499220029;</code>
   *
   * @return The optionsRequestedPolicyVersion.
   */
  @java.lang.Override
  public int getOptionsRequestedPolicyVersion() {
    return optionsRequestedPolicyVersion_;
  }
  public static final int PROJECT_FIELD_NUMBER = 227560217;
  @SuppressWarnings("serial")
  private volatile java.lang.Object project_ = "";
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The project.
   */
  @java.lang.Override
  public java.lang.String getProject() {
    // Field holds either a String or a ByteString; decode lazily and cache the
    // String form so subsequent reads are allocation-free.
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for project.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int RESOURCE_FIELD_NUMBER = 195806222;
  @SuppressWarnings("serial")
  private volatile java.lang.Object resource_ = "";
  /**
   *
   *
   * <pre>
   * Name or id of the resource for this request.
   * </pre>
   *
   * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The resource.
   */
  @java.lang.Override
  public java.lang.String getResource() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resource_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name or id of the resource for this request.
   * </pre>
   *
   * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for resource.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getResourceBytes() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ZONE_FIELD_NUMBER = 3744684;
  @SuppressWarnings("serial")
  private volatile java.lang.Object zone_ = "";
  /**
   *
   *
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The zone.
   */
  @java.lang.Override
  public java.lang.String getZone() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      zone_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for zone.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getZoneBytes() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      zone_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not yet computed, 1 = initialized. This message has no required fields,
  // so isInitialized() always ends up true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order: zone (3744684),
    // resource (195806222), project (227560217), then the optional int32.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeInt32(499220029, optionsRequestedPolicyVersion_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed. Must mirror writeTo() exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeInt32Size(
              499220029, optionsRequestedPolicyVersion_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest other =
        (com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest) obj;
    // Optional field: presence must match before values are compared.
    if (hasOptionsRequestedPolicyVersion() != other.hasOptionsRequestedPolicyVersion())
      return false;
    if (hasOptionsRequestedPolicyVersion()) {
      if (getOptionsRequestedPolicyVersion() != other.getOptionsRequestedPolicyVersion())
        return false;
    }
    if (!getProject().equals(other.getProject())) return false;
    if (!getResource().equals(other.getResource())) return false;
    if (!getZone().equals(other.getZone())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 means not yet computed (hash algorithm never yields 0 here
    // because it starts from a non-zero seed).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasOptionsRequestedPolicyVersion()) {
      hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER;
      hash = (53 * hash) + getOptionsRequestedPolicyVersion();
    }
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
    hash = (53 * hash) + getResource().hashCode();
    hash = (37 * hash) + ZONE_FIELD_NUMBER;
    hash = (53 * hash) + getZone().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A request message for InstantSnapshots.GetIamPolicy. See the method description for details.
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest)
      com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetIamPolicyInstantSnapshotRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetIamPolicyInstantSnapshotRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest.class,
              com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest.Builder.class);
    }
    // Construct using com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      optionsRequestedPolicyVersion_ = 0;
      project_ = "";
      resource_ = "";
      zone_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetIamPolicyInstantSnapshotRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest build() {
      com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest buildPartial() {
      com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest result =
          new com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose builder-local bits are set into the message.
    // Builder bits: 0x1 = optionsRequestedPolicyVersion, 0x2 = project,
    // 0x4 = resource, 0x8 = zone. Only the optional int32 carries a has-bit
    // into the built message.
    private void buildPartial0(
        com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.project_ = project_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.resource_ = resource_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.zone_ = zone_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest) {
        return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest other) {
      if (other
          == com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest.getDefaultInstance())
        return this;
      // Proto merge semantics: set fields in `other` overwrite this builder's;
      // empty strings in `other` are treated as unset and skipped.
      if (other.hasOptionsRequestedPolicyVersion()) {
        setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion());
      }
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getResource().isEmpty()) {
        resource_ = other.resource_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getZone().isEmpty()) {
        zone_ = other.zone_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Each case value is (field_number << 3) | wire_type. Tags for field
          // numbers above 2^28 overflow to negative ints (e.g. -301207064 is
          // field 499220029 with varint wire type 0).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 29957474:
              {
                zone_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 29957474
            case 1566449778:
              {
                resource_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 1566449778
            case 1820481738:
              {
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1820481738
            case -301207064:
              {
                optionsRequestedPolicyVersion_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case -301207064
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private int optionsRequestedPolicyVersion_;
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @return Whether the optionsRequestedPolicyVersion field is set.
     */
    @java.lang.Override
    public boolean hasOptionsRequestedPolicyVersion() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @return The optionsRequestedPolicyVersion.
     */
    @java.lang.Override
    public int getOptionsRequestedPolicyVersion() {
      return optionsRequestedPolicyVersion_;
    }
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @param value The optionsRequestedPolicyVersion to set.
     * @return This builder for chaining.
     */
    public Builder setOptionsRequestedPolicyVersion(int value) {
      optionsRequestedPolicyVersion_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOptionsRequestedPolicyVersion() {
      bitField0_ = (bitField0_ & ~0x00000001);
      optionsRequestedPolicyVersion_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object project_ = "";
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The project.
     */
    public java.lang.String getProject() {
      java.lang.Object ref = project_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        project_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for project.
     */
    public com.google.protobuf.ByteString getProjectBytes() {
      java.lang.Object ref = project_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        project_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The project to set.
     * @return This builder for chaining.
     */
    public Builder setProject(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProject() {
      project_ = getDefaultInstance().getProject();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for project to set.
     * @return This builder for chaining.
     */
    public Builder setProjectBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object resource_ = "";
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The resource.
     */
    public java.lang.String getResource() {
      java.lang.Object ref = resource_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resource_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for resource.
     */
    public com.google.protobuf.ByteString getResourceBytes() {
      java.lang.Object ref = resource_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        resource_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The resource to set.
     * @return This builder for chaining.
     */
    public Builder setResource(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      resource_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearResource() {
      resource_ = getDefaultInstance().getResource();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for resource to set.
     * @return This builder for chaining.
     */
    public Builder setResourceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      resource_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object zone_ = "";
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The zone.
     */
    public java.lang.String getZone() {
      java.lang.Object ref = zone_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        zone_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for zone.
     */
    public com.google.protobuf.ByteString getZoneBytes() {
      java.lang.Object ref = zone_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        zone_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The zone to set.
     * @return This builder for chaining.
     */
    public Builder setZone(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      zone_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearZone() {
      zone_ = getDefaultInstance().getZone();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for zone to set.
     * @return This builder for chaining.
     */
    public Builder setZoneBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      zone_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest)
  private static final com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest();
  }
  public static com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<GetIamPolicyInstantSnapshotRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetIamPolicyInstantSnapshotRequest>() {
        @java.lang.Override
        public GetIamPolicyInstantSnapshotRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<GetIamPolicyInstantSnapshotRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GetIamPolicyInstantSnapshotRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.compute.v1.GetIamPolicyInstantSnapshotRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hbase | 36,083 | hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import static org.apache.hadoop.hbase.HConstants.RPC_HEADER;
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import io.opentelemetry.context.propagation.TextMapGetter;
import java.io.Closeable;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.security.GeneralSecurityException;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import org.apache.commons.crypto.cipher.CryptoCipherFactory;
import org.apache.commons.crypto.random.CryptoRandom;
import org.apache.commons.crypto.random.CryptoRandomFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.ExtendedCellScanner;
import org.apache.hadoop.hbase.client.ConnectionRegistryEndpoint;
import org.apache.hadoop.hbase.client.VersionInfoUtil;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;
import org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
import org.apache.hadoop.hbase.security.SaslStatus;
import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProvider;
import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import org.apache.hadoop.hbase.security.provider.SimpleSaslServerAuthenticationProvider;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
import org.apache.hbase.thirdparty.com.google.protobuf.BlockingService;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteInput;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;
import org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor;
import org.apache.hbase.thirdparty.com.google.protobuf.Message;
import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.SecurityPreamableResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetConnectionRegistryResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo;
/** Reads calls from a connection and queues them for handling. */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "VO_VOLATILE_INCREMENT",
justification = "False positive according to http://sourceforge.net/p/findbugs/bugs/1032/")
@InterfaceAudience.Private
abstract class ServerRpcConnection implements Closeable {
  // Extracts trace context propagated by the client inside RPCTInfo.
  private static final TextMapGetter<RPCTInfo> getter = new RPCTInfoGetter();
  // The owning server; provides configuration, metrics, scheduler and security services.
  protected final RpcServer rpcServer;
  // If the connection header has been read or not.
  protected boolean connectionHeaderRead = false;
  // Cleanup hook for the request buffer of the call currently being read; run exactly once.
  protected CallCleanup callCleanup;
  // Cache the remote host & port info so that even if the socket is
  // disconnected, we can say where it used to connect to.
  protected String hostAddress;
  protected int remotePort;
  protected InetAddress addr;
  // The header the client sent after the preamble; null until connectionHeaderRead is true.
  protected ConnectionHeader connectionHeader;
  // Copied out of the header's attribute list so it survives buffer release.
  protected Map<String, byte[]> connectionAttributes;
  /**
   * Codec the client asked use.
   */
  protected Codec codec;
  /**
   * Compression codec the client asked us use.
   */
  protected CompressionCodec compressionCodec;
  // The protobuf service the client bound this connection to.
  protected BlockingService service;
  // SASL auth mechanism selected from the preamble's auth byte.
  protected SaslServerAuthenticationProvider provider;
  protected boolean skipInitialSaslHandshake;
  protected boolean useSasl;
  protected HBaseSaslRpcServer saslServer;
  // was authentication allowed with a fallback to simple auth
  protected boolean authenticatedWithFallback;
  // Whether the client is new enough (>= 1.2) to understand RetryImmediatelyException.
  protected boolean retryImmediatelySupported = false;
  protected User user = null;
  protected UserGroupInformation ugi = null;
  protected SaslServerAuthenticationProviders saslProviders = null;
  // Client TLS certificate chain, if mutual TLS was negotiated; otherwise null.
  protected X509Certificate[] clientCertificateChain = null;
  /**
   * @param rpcServer the server this connection belongs to; supplies configuration and shared
   *                  services for the whole connection lifecycle
   */
  public ServerRpcConnection(RpcServer rpcServer) {
    this.rpcServer = rpcServer;
    this.callCleanup = null;
    this.saslProviders = SaslServerAuthenticationProviders.getInstance(rpcServer.getConf());
  }

  @Override
  public String toString() {
    return getHostAddress() + ":" + remotePort;
  }

  /** Returns the cached remote host address; still valid after the socket disconnects. */
  public String getHostAddress() {
    return hostAddress;
  }

  public InetAddress getHostInetAddress() {
    return addr;
  }

  public int getRemotePort() {
    return remotePort;
  }

  /**
   * Returns the client's version info from the connection header, or {@code null} if the header has
   * not been read yet or carried no version info.
   */
  public VersionInfo getVersionInfo() {
    if (connectionHeader != null && connectionHeader.hasVersionInfo()) {
      return connectionHeader.getVersionInfo();
    }
    return null;
  }
private String getFatalConnectionString(final int version, final byte authByte) {
return "serverVersion=" + RpcServer.CURRENT_VERSION + ", clientVersion=" + version
+ ", authMethod=" + authByte +
// The provider may be null if we failed to parse the header of the request
", authName=" + (provider == null ? "unknown" : provider.getSaslAuthMethod().getName())
+ " from " + toString();
}
/**
* Set up cell block codecs
*/
private void setupCellBlockCodecs() throws FatalConnectionException {
// TODO: Plug in other supported decoders.
if (!connectionHeader.hasCellBlockCodecClass()) {
return;
}
String className = connectionHeader.getCellBlockCodecClass();
if (className == null || className.length() == 0) {
return;
}
try {
this.codec = (Codec) Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new UnsupportedCellCodecException(className, e);
}
if (!connectionHeader.hasCellBlockCompressorClass()) {
return;
}
className = connectionHeader.getCellBlockCompressorClass();
try {
this.compressionCodec =
(CompressionCodec) Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new UnsupportedCompressionCodecException(className, e);
}
}
  /**
   * Set up cipher for rpc encryption with Apache Commons Crypto.
   * @return a pair of the header response to send back to the client and the negotiated
   *         {@link CryptoAES}, or {@code null} when Crypto AES is not applicable (simple auth, QOP
   *         is not privacy, the feature is disabled, or no transformation was requested)
   * @throws FatalConnectionException if key/iv generation or cipher construction fails
   */
  private Pair<RPCProtos.ConnectionHeaderResponse, CryptoAES> setupCryptoCipher()
    throws FatalConnectionException {
    // If simple auth, return
    if (saslServer == null) {
      return null;
    }
    // check if rpc encryption with Crypto AES
    String qop = saslServer.getNegotiatedQop();
    boolean isEncryption = SaslUtil.QualityOfProtection.PRIVACY.getSaslQop().equalsIgnoreCase(qop);
    boolean isCryptoAesEncryption = isEncryption
      && this.rpcServer.conf.getBoolean("hbase.rpc.crypto.encryption.aes.enabled", false);
    if (!isCryptoAesEncryption) {
      return null;
    }
    if (!connectionHeader.hasRpcCryptoCipherTransformation()) {
      return null;
    }
    String transformation = connectionHeader.getRpcCryptoCipherTransformation();
    if (transformation == null || transformation.length() == 0) {
      return null;
    }
    // Negotiates AES based on complete saslServer.
    // The Crypto metadata need to be encrypted and send to client.
    Properties properties = new Properties();
    // the property for SecureRandomFactory
    properties.setProperty(CryptoRandomFactory.CLASSES_KEY,
      this.rpcServer.conf.get("hbase.crypto.sasl.encryption.aes.crypto.random",
        "org.apache.commons.crypto.random.JavaCryptoRandom"));
    // the property for cipher class
    properties.setProperty(CryptoCipherFactory.CLASSES_KEY,
      this.rpcServer.conf.get("hbase.rpc.crypto.encryption.aes.cipher.class",
        "org.apache.commons.crypto.cipher.JceCipher"));
    int cipherKeyBits =
      this.rpcServer.conf.getInt("hbase.rpc.crypto.encryption.aes.cipher.keySizeBits", 128);
    // generate key and iv
    if (cipherKeyBits % 8 != 0) {
      throw new IllegalArgumentException(
        "The AES cipher key size in bits" + " should be a multiple of byte");
    }
    int len = cipherKeyBits / 8;
    byte[] inKey = new byte[len];
    byte[] outKey = new byte[len];
    byte[] inIv = new byte[len];
    byte[] outIv = new byte[len];
    CryptoAES cryptoAES;
    try {
      // generate the cipher meta data with SecureRandom
      CryptoRandom secureRandom = CryptoRandomFactory.getCryptoRandom(properties);
      secureRandom.nextBytes(inKey);
      secureRandom.nextBytes(outKey);
      secureRandom.nextBytes(inIv);
      secureRandom.nextBytes(outIv);
      // create CryptoAES for server
      cryptoAES = new CryptoAES(transformation, properties, inKey, outKey, inIv, outIv);
    } catch (GeneralSecurityException | IOException ex) {
      throw new UnsupportedCryptoException(ex.getMessage(), ex);
    }
    // create SaslCipherMeta and send to client,
    // for client, the [inKey, outKey], [inIv, outIv] should be reversed
    RPCProtos.CryptoCipherMeta.Builder ccmBuilder = RPCProtos.CryptoCipherMeta.newBuilder();
    ccmBuilder.setTransformation(transformation);
    ccmBuilder.setInIv(getByteString(outIv));
    ccmBuilder.setInKey(getByteString(outKey));
    ccmBuilder.setOutIv(getByteString(inIv));
    ccmBuilder.setOutKey(getByteString(inKey));
    RPCProtos.ConnectionHeaderResponse resp =
      RPCProtos.ConnectionHeaderResponse.newBuilder().setCryptoCipherMeta(ccmBuilder).build();
    return Pair.newPair(resp, cryptoAES);
  }

  /** Wraps the given bytes in a ByteString, reusing the EMPTY singleton for zero-length input. */
  private ByteString getByteString(byte[] bytes) {
    // return singleton to reduce object allocation
    return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
  }
private UserGroupInformation createUser(ConnectionHeader head) {
UserGroupInformation ugi = null;
if (!head.hasUserInfo()) {
return null;
}
UserInformation userInfoProto = head.getUserInfo();
String effectiveUser = null;
if (userInfoProto.hasEffectiveUser()) {
effectiveUser = userInfoProto.getEffectiveUser();
}
String realUser = null;
if (userInfoProto.hasRealUser()) {
realUser = userInfoProto.getRealUser();
}
if (effectiveUser != null) {
if (realUser != null) {
UserGroupInformation realUserUgi = UserGroupInformation.createRemoteUser(realUser);
ugi = UserGroupInformation.createProxyUser(effectiveUser, realUserUgi);
} else {
ugi = UserGroupInformation.createRemoteUser(effectiveUser);
}
}
return ugi;
}
protected final void disposeSasl() {
if (saslServer != null) {
saslServer.dispose();
saslServer = null;
}
}
  /**
   * No protobuf encoding of raw sasl messages
   * <p/>
   * Wire format: 4-byte status code, then either the writable payload (on SUCCESS) or the error
   * class name and message as writable strings.
   * @param status     SASL status to send
   * @param rv         payload to write on SUCCESS; ignored otherwise
   * @param errorClass error class name, written only when status is not SUCCESS
   * @param error      error message, written only when status is not SUCCESS
   */
  protected final void doRawSaslReply(SaslStatus status, Writable rv, String errorClass,
    String error) throws IOException {
    BufferChain bc;
    // In my testing, have noticed that sasl messages are usually
    // in the ballpark of 100-200. That's why the initial capacity is 256.
    try (ByteBufferOutputStream saslResponse = new ByteBufferOutputStream(256);
      DataOutputStream out = new DataOutputStream(saslResponse)) {
      out.writeInt(status.state); // write status
      if (status == SaslStatus.SUCCESS) {
        rv.write(out);
      } else {
        WritableUtils.writeString(out, errorClass);
        WritableUtils.writeString(out, error);
      }
      bc = new BufferChain(saslResponse.getByteBuffer());
    }
    doRespond(() -> bc);
  }
  /**
   * Lazily creates the SASL server for this connection.
   * <p/>
   * NOTE(review): the lazy init is not synchronized — presumably only ever called from the single
   * reader thread that owns this connection; confirm before calling from elsewhere.
   */
  HBaseSaslRpcServer getOrCreateSaslServer() throws IOException {
    if (saslServer == null) {
      saslServer = new HBaseSaslRpcServer(provider, rpcServer.saslProps, rpcServer.secretManager);
    }
    return saslServer;
  }

  /**
   * Completes SASL negotiation: verifies the negotiated QOP against what was requested, resolves
   * the authorized UGI, and records success in metrics and the audit log.
   */
  void finishSaslNegotiation() throws IOException {
    String negotiatedQop = saslServer.getNegotiatedQop();
    SaslUtil.verifyNegotiatedQop(saslServer.getRequestedQop(), negotiatedQop);
    ugi = provider.getAuthorizedUgi(saslServer.getAuthorizationID(), this.rpcServer.secretManager);
    RpcServer.LOG.debug(
      "SASL server context established. Authenticated client: {}. Negotiated QoP is {}", ugi,
      negotiatedQop);
    rpcServer.metrics.authenticationSuccess();
    RpcServer.AUDITLOG.info(RpcServer.AUTH_SUCCESSFUL_FOR + ugi);
  }
  /**
   * Dispatches one unit of work read from the wire: the first buffer is the connection header (with
   * authorization checks); every subsequent buffer is a request.
   */
  public void processOneRpc(ByteBuff buf) throws IOException, InterruptedException {
    if (connectionHeaderRead) {
      processRequest(buf);
    } else {
      processConnectionHeader(buf);
      callCleanupIfNeeded();
      this.connectionHeaderRead = true;
      this.rpcServer.getRpcCoprocessorHost().preAuthorizeConnection(connectionHeader, addr);
      if (rpcServer.needAuthorization() && !authorizeConnection()) {
        // Throw FatalConnectionException wrapping ACE so client does right thing and closes
        // down the connection instead of trying to read non-existent return.
        throw new AccessDeniedException("Connection from " + this + " for service "
          + connectionHeader.getServiceName() + " is unauthorized for user: " + ugi);
      }
      this.user = this.rpcServer.userProvider.create(this.ugi);
      this.rpcServer.getRpcCoprocessorHost().postAuthorizeConnection(
        this.user != null ? this.user.getName() : null, this.clientCertificateChain);
    }
  }
  /**
   * Authorizes the connection (proxy-user check plus service-level authorization), updating
   * metrics either way.
   * @return true if authorized; false if denied (an error response has already been queued)
   */
  private boolean authorizeConnection() throws IOException {
    try {
      // If auth method is DIGEST, the token was obtained by the
      // real user for the effective user, therefore not required to
      // authorize real user. doAs is allowed only for simple or kerberos
      // authentication
      if (ugi != null && ugi.getRealUser() != null && provider.supportsProtocolAuthentication()) {
        ProxyUsers.authorize(ugi, this.getHostAddress(), this.rpcServer.conf);
      }
      this.rpcServer.authorize(ugi, connectionHeader, getHostInetAddress());
      this.rpcServer.metrics.authorizationSuccess();
    } catch (AuthorizationException ae) {
      if (RpcServer.LOG.isDebugEnabled()) {
        RpcServer.LOG.debug("Connection authorization failed: " + ae.getMessage(), ae);
      }
      this.rpcServer.metrics.authorizationFailure();
      doRespond(getErrorResponse(ae.getMessage(), new AccessDeniedException(ae)));
      return false;
    }
    return true;
  }
  /**
   * Wraps the given buffer in an aliasing CodedInputStream positioned at the buffer's current
   * position, without copying data.
   */
  private CodedInputStream createCis(ByteBuff buf) {
    // Here we read in the header. We avoid having pb
    // do its default 4k allocation for CodedInputStream. We force it to use
    // backing array.
    CodedInputStream cis;
    if (buf.hasArray()) {
      cis = UnsafeByteOperations
        .unsafeWrap(buf.array(), buf.arrayOffset() + buf.position(), buf.limit()).newCodedInput();
    } else {
      // off-heap buffer: adapt it via ByteBuffByteInput instead of copying to an array
      cis = UnsafeByteOperations.unsafeWrap(new ByteBuffByteInput(buf, buf.limit()), 0, buf.limit())
        .newCodedInput();
    }
    cis.enableAliasing(true);
    return cis;
  }
// Reads the connection header following version
private void processConnectionHeader(ByteBuff buf) throws IOException {
this.connectionHeader = ConnectionHeader.parseFrom(createCis(buf));
// we want to copy the attributes prior to releasing the buffer so that they don't get corrupted
// eventually
if (connectionHeader.getAttributeList().isEmpty()) {
this.connectionAttributes = Collections.emptyMap();
} else {
this.connectionAttributes =
Maps.newHashMapWithExpectedSize(connectionHeader.getAttributeList().size());
for (HBaseProtos.NameBytesPair nameBytesPair : connectionHeader.getAttributeList()) {
this.connectionAttributes.put(nameBytesPair.getName(),
nameBytesPair.getValue().toByteArray());
}
}
String serviceName = connectionHeader.getServiceName();
if (serviceName == null) {
throw new EmptyServiceNameException();
}
this.service = RpcServer.getService(this.rpcServer.services, serviceName);
if (this.service == null) {
throw new UnknownServiceException(serviceName);
}
setupCellBlockCodecs();
sendConnectionHeaderResponseIfNeeded();
UserGroupInformation protocolUser = createUser(connectionHeader);
if (!useSasl) {
ugi = protocolUser;
if (ugi != null) {
ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
}
// audit logging for SASL authenticated users happens in saslReadAndProcess()
if (authenticatedWithFallback) {
RpcServer.LOG.warn("Allowed fallback to SIMPLE auth for {} connecting from {}", ugi,
getHostAddress());
}
} else {
// user is authenticated
ugi.setAuthenticationMethod(provider.getSaslAuthMethod().getAuthMethod());
// Now we check if this is a proxy user case. If the protocol user is
// different from the 'user', it is a proxy user scenario. However,
// this is not allowed if user authenticated with DIGEST.
if ((protocolUser != null) && (!protocolUser.getUserName().equals(ugi.getUserName()))) {
if (!provider.supportsProtocolAuthentication()) {
// Not allowed to doAs if token authentication is used
throw new AccessDeniedException("Authenticated user (" + ugi
+ ") doesn't match what the client claims to be (" + protocolUser + ")");
} else {
// Effective user can be different from authenticated user
// for simple auth or kerberos auth
// The user is the real user. Now we create a proxy user
UserGroupInformation realUser = ugi;
ugi = UserGroupInformation.createProxyUser(protocolUser.getUserName(), realUser);
// Now the user is a proxy user, set Authentication method Proxy.
ugi.setAuthenticationMethod(AuthenticationMethod.PROXY);
}
}
}
String version;
if (this.connectionHeader.hasVersionInfo()) {
// see if this connection will support RetryImmediatelyException
this.retryImmediatelySupported = VersionInfoUtil.hasMinimumVersion(getVersionInfo(), 1, 2);
version = this.connectionHeader.getVersionInfo().getVersion();
} else {
version = "UNKNOWN";
}
RpcServer.AUDITLOG.info("Connection from {}:{}, version={}, sasl={}, ugi={}, service={}",
this.hostAddress, this.remotePort, version, this.useSasl, this.ugi, serviceName);
}
  /**
   * Send the response for connection header
   * <p/>
   * Only sends a response when Crypto AES was negotiated; the switch to the AES wrapper happens in
   * {@code done()}, i.e. strictly after the response bytes have gone out, because the client still
   * unwraps this response with its original SaslClient.
   */
  private void sendConnectionHeaderResponseIfNeeded() throws FatalConnectionException {
    Pair<RPCProtos.ConnectionHeaderResponse, CryptoAES> pair = setupCryptoCipher();
    // Response the connection header if Crypto AES is enabled
    if (pair == null) {
      return;
    }
    try {
      int size = pair.getFirst().getSerializedSize();
      BufferChain bc;
      // length-prefixed response: 4-byte size followed by the serialized message
      try (ByteBufferOutputStream bbOut = new ByteBufferOutputStream(4 + size);
        DataOutputStream out = new DataOutputStream(bbOut)) {
        out.writeInt(size);
        pair.getFirst().writeTo(out);
        bc = new BufferChain(bbOut.getByteBuffer());
      }
      doRespond(new RpcResponse() {
        @Override
        public BufferChain getResponse() {
          return bc;
        }

        @Override
        public void done() {
          // must switch after sending the connection header response, as the client still uses the
          // original SaslClient to unwrap the data we send back
          saslServer.switchToCryptoAES(pair.getSecond());
        }
      });
    } catch (IOException ex) {
      throw new UnsupportedCryptoException(ex.getMessage(), ex);
    }
  }
  /** Queues/writes the given response back to the client; transport-specific. */
  protected abstract void doRespond(RpcResponse resp) throws IOException;
  /**
   * Has the request header and the request param and optionally encoded data buffer all in this one
   * array.
   * <p/>
   * Will be overridden in tests.
   */
  protected void processRequest(ByteBuff buf) throws IOException, InterruptedException {
    long totalRequestSize = buf.limit();
    // running byte offset into buf; tracks where the cell block (if any) starts
    int offset = 0;
    // Here we read in the header. We avoid having pb
    // do its default 4k allocation for CodedInputStream. We force it to use
    // backing array.
    CodedInputStream cis = createCis(buf);
    int headerSize = cis.readRawVarint32();
    offset = cis.getTotalBytesRead();
    Message.Builder builder = RequestHeader.newBuilder();
    ProtobufUtil.mergeFrom(builder, cis, headerSize);
    RequestHeader header = (RequestHeader) builder.build();
    offset += headerSize;
    // re-attach the client's trace context so the server-side span joins the client's trace
    Context traceCtx = GlobalOpenTelemetry.getPropagators().getTextMapPropagator()
      .extract(Context.current(), header.getTraceInfo(), getter);
    // n.b. Management of this Span instance is a little odd. Most exit paths from this try scope
    // are early-exits due to error cases. There's only one success path, the asynchronous call to
    // RpcScheduler#dispatch. The success path assumes ownership of the span, which is represented
    // by null-ing out the reference in this scope. All other paths end the span. Thus, and in
    // order to avoid accidentally orphaning the span, the call to Span#end happens in a finally
    // block iff the span is non-null.
    Span span = TraceUtil.createRemoteSpan("RpcServer.process", traceCtx);
    try (Scope ignored = span.makeCurrent()) {
      int id = header.getCallId();
      // HBASE-28128 - if server is aborting, don't bother trying to process. It will
      // fail at the handler layer, but worse might result in CallQueueTooBigException if the
      // queue is full but server is not properly processing requests. Better to throw an aborted
      // exception here so that the client can properly react.
      if (rpcServer.server != null && rpcServer.server.isAborted()) {
        RegionServerAbortedException serverIsAborted = new RegionServerAbortedException(
          "Server " + rpcServer.server.getServerName() + " aborting");
        this.rpcServer.metrics.exception(serverIsAborted);
        sendErrorResponseForCall(id, totalRequestSize, span, serverIsAborted.getMessage(),
          serverIsAborted);
        return;
      }
      if (RpcServer.LOG.isTraceEnabled()) {
        RpcServer.LOG.trace("RequestHeader " + TextFormat.shortDebugString(header)
          + " totalRequestSize: " + totalRequestSize + " bytes");
      }
      // Enforcing the call queue size, this triggers a retry in the client
      // This is a bit late to be doing this check - we have already read in the
      // total request.
      if (
        (totalRequestSize + this.rpcServer.callQueueSizeInBytes.sum())
            > this.rpcServer.maxQueueSizeInBytes
      ) {
        this.rpcServer.metrics.exception(RpcServer.CALL_QUEUE_TOO_BIG_EXCEPTION);
        sendErrorResponseForCall(id, totalRequestSize, span,
          "Call queue is full on " + this.rpcServer.server.getServerName()
            + ", is hbase.ipc.server.max.callqueue.size too small?",
          RpcServer.CALL_QUEUE_TOO_BIG_EXCEPTION);
        return;
      }
      MethodDescriptor md = null;
      Message param = null;
      ExtendedCellScanner cellScanner = null;
      try {
        if (header.hasRequestParam() && header.getRequestParam()) {
          md = this.service.getDescriptorForType().findMethodByName(header.getMethodName());
          if (md == null) {
            throw new UnsupportedOperationException(header.getMethodName());
          }
          builder = this.service.getRequestPrototype(md).newBuilderForType();
          cis.resetSizeCounter();
          int paramSize = cis.readRawVarint32();
          offset += cis.getTotalBytesRead();
          if (builder != null) {
            ProtobufUtil.mergeFrom(builder, cis, paramSize);
            param = builder.build();
          }
          offset += paramSize;
        } else {
          // currently header must have request param, so we directly throw
          // exception here
          String msg = "Invalid request header: " + TextFormat.shortDebugString(header)
            + ", should have param set in it";
          RpcServer.LOG.warn(msg);
          throw new DoNotRetryIOException(msg);
        }
        if (header.hasCellBlockMeta()) {
          // the cell block follows the param; scan it from a bounded duplicate of the buffer
          buf.position(offset);
          ByteBuff dup = buf.duplicate();
          dup.limit(offset + header.getCellBlockMeta().getLength());
          cellScanner = this.rpcServer.cellBlockBuilder.createCellScannerReusingBuffers(this.codec,
            this.compressionCodec, dup);
        }
      } catch (Throwable thrown) {
        InetSocketAddress address = this.rpcServer.getListenerAddress();
        String msg = (address != null ? address : "(channel closed)")
          + " is unable to read call parameter from client " + getHostAddress();
        RpcServer.LOG.warn(msg, thrown);
        this.rpcServer.metrics.exception(thrown);
        final Throwable responseThrowable;
        if (thrown instanceof LinkageError) {
          // probably the hbase hadoop version does not match the running hadoop version
          responseThrowable = new DoNotRetryIOException(thrown);
        } else if (thrown instanceof UnsupportedOperationException) {
          // If the method is not present on the server, do not retry.
          responseThrowable = new DoNotRetryIOException(thrown);
        } else {
          responseThrowable = thrown;
        }
        sendErrorResponseForCall(id, totalRequestSize, span,
          msg + "; " + responseThrowable.getMessage(), responseThrowable);
        return;
      }
      int timeout = 0;
      if (header.hasTimeout() && header.getTimeout() > 0) {
        timeout = Math.max(this.rpcServer.minClientRequestTimeout, header.getTimeout());
      }
      ServerCall<?> call = createCall(id, this.service, md, header, param, cellScanner,
        totalRequestSize, this.addr, timeout, this.callCleanup);
      if (this.rpcServer.scheduler.dispatch(new CallRunner(this.rpcServer, call))) {
        // unset span so that it's not closed in the finally block
        span = null;
      } else {
        this.rpcServer.callQueueSizeInBytes.add(-1 * call.getSize());
        this.rpcServer.metrics.exception(RpcServer.CALL_QUEUE_TOO_BIG_EXCEPTION);
        call.setResponse(null, null, RpcServer.CALL_QUEUE_TOO_BIG_EXCEPTION,
          "Call queue is full on " + this.rpcServer.server.getServerName()
            + ", too many items queued ?");
        TraceUtil.setError(span, RpcServer.CALL_QUEUE_TOO_BIG_EXCEPTION);
        call.sendResponseIfReady();
      }
    } finally {
      if (span != null) {
        span.end();
      }
    }
  }
  /**
   * Builds a failed call carrying the given throwable, records the error on the span, and queues
   * the response.
   */
  private void sendErrorResponseForCall(int id, long totalRequestSize, Span span, String msg,
    Throwable responseThrowable) throws IOException {
    ServerCall<?> failedcall = createCall(id, this.service, null, null, null, null,
      totalRequestSize, null, 0, this.callCleanup);
    failedcall.setResponse(null, null, responseThrowable, msg);
    TraceUtil.setError(span, responseThrowable);
    failedcall.sendResponseIfReady();
  }

  /**
   * Builds a standalone error response (call id -1) for failures that are not tied to any specific
   * client call, e.g. connection-level authorization failures.
   */
  protected final RpcResponse getErrorResponse(String msg, Exception e) throws IOException {
    ResponseHeader.Builder headerBuilder = ResponseHeader.newBuilder().setCallId(-1);
    ServerCall.setExceptionResponse(e, msg, headerBuilder);
    ByteBuffer headerBuf =
      ServerCall.createHeaderAndMessageBytes(null, headerBuilder.build(), 0, null);
    BufferChain buf = new BufferChain(headerBuf);
    return () -> buf;
  }
  /** Logs the bad-preamble condition and responds with a generic FatalConnectionException. */
  private void doBadPreambleHandling(String msg) throws IOException {
    doBadPreambleHandling(msg, new FatalConnectionException(msg));
  }

  /** Logs the bad-preamble condition and responds with the given exception. */
  private void doBadPreambleHandling(String msg, Exception e) throws IOException {
    RpcServer.LOG.warn(msg, e);
    doRespond(getErrorResponse(msg, e));
  }

  /** Sends a preamble-stage response message under a synthetic call id of -1. */
  private void doPreambleResponse(Message resp) throws IOException {
    ResponseHeader header = ResponseHeader.newBuilder().setCallId(-1).build();
    ByteBuffer buf = ServerCall.createHeaderAndMessageBytes(resp, header, 0, null);
    BufferChain bufChain = new BufferChain(buf);
    doRespond(() -> bufChain);
  }
  /**
   * Answers a connection-registry preamble by returning the cluster id.
   * @return true if a response was sent (connection can then be closed); false when this server
   *         cannot serve registry requests
   */
  private boolean doConnectionRegistryResponse() throws IOException {
    if (!(rpcServer.server instanceof ConnectionRegistryEndpoint)) {
      // should be in tests or some scenarios where we should not reach here
      return false;
    }
    // on backup masters, this request may be blocked since we need to fetch it from filesystem,
    // but since it is just backup master, it is not a critical problem
    String clusterId = ((ConnectionRegistryEndpoint) rpcServer.server).getClusterId();
    RpcServer.LOG.debug("Response connection registry, clusterId = '{}'", clusterId);
    if (clusterId == null) {
      // should be in tests or some scenarios where we should not reach here
      return false;
    }
    GetConnectionRegistryResponse resp =
      GetConnectionRegistryResponse.newBuilder().setClusterId(clusterId).build();
    doPreambleResponse(resp);
    return true;
  }

  /**
   * Answers a security preamble: returns the server principal when security is on, or a
   * {@link SecurityNotEnabledException} so the client falls back to simple authentication.
   */
  private void doSecurityPreambleResponse() throws IOException {
    if (rpcServer.isSecurityEnabled) {
      SecurityPreamableResponse resp = SecurityPreamableResponse.newBuilder()
        .setServerPrincipal(rpcServer.serverPrincipal).build();
      doPreambleResponse(resp);
    } else {
      // security is not enabled, do not need a principal when connecting, throw a special exception
      // to let client know it should just use simple authentication
      doRespond(getErrorResponse("security is not enabled", new SecurityNotEnabledException()));
    }
  }
  /** Runs the pending buffer-cleanup hook at most once, then clears it. */
  protected final void callCleanupIfNeeded() {
    if (callCleanup != null) {
      callCleanup.run();
      callCleanup = null;
    }
  }

  /** Outcome of processing the 6-byte rpc preamble. */
  protected enum PreambleResponse {
    SUCCEED, // successfully processed the rpc preamble header
    CONTINUE, // the preamble header is for other purpose, wait for the rpc preamble header
    CLOSE // close the rpc connection
  }
  /**
   * Processes the 6-byte connection preamble: 4 magic bytes, a version byte and an auth byte.
   * Special preambles (connection registry, security probe) are handled first; otherwise the
   * version and auth method are validated and the SASL provider selected.
   * @param preambleBuffer buffer holding exactly the 6 preamble bytes at its current position
   * @return SUCCEED to proceed to the connection header, CONTINUE to wait for the real preamble,
   *         CLOSE to drop the connection
   */
  protected final PreambleResponse processPreamble(ByteBuffer preambleBuffer) throws IOException {
    assert preambleBuffer.remaining() == 6;
    if (
      ByteBufferUtils.equals(preambleBuffer, preambleBuffer.position(), 6,
        RpcClient.REGISTRY_PREAMBLE_HEADER, 0, 6) && doConnectionRegistryResponse()
    ) {
      return PreambleResponse.CLOSE;
    }
    if (
      ByteBufferUtils.equals(preambleBuffer, preambleBuffer.position(), 6,
        RpcClient.SECURITY_PREAMBLE_HEADER, 0, 6)
    ) {
      doSecurityPreambleResponse();
      return PreambleResponse.CONTINUE;
    }
    if (!ByteBufferUtils.equals(preambleBuffer, preambleBuffer.position(), 4, RPC_HEADER, 0, 4)) {
      doBadPreambleHandling(
        "Expected HEADER=" + Bytes.toStringBinary(RPC_HEADER) + " but received HEADER="
          + Bytes.toStringBinary(
            ByteBufferUtils.toBytes(preambleBuffer, preambleBuffer.position(), RPC_HEADER.length),
            0, RPC_HEADER.length)
          + " from " + toString());
      return PreambleResponse.CLOSE;
    }
    // bytes 4 and 5 of the preamble: protocol version and auth method selector
    int version = preambleBuffer.get(preambleBuffer.position() + 4) & 0xFF;
    byte authByte = preambleBuffer.get(preambleBuffer.position() + 5);
    if (version != RpcServer.CURRENT_VERSION) {
      String msg = getFatalConnectionString(version, authByte);
      doBadPreambleHandling(msg, new WrongVersionException(msg));
      return PreambleResponse.CLOSE;
    }
    this.provider = this.saslProviders.selectProvider(authByte);
    if (this.provider == null) {
      String msg = getFatalConnectionString(version, authByte);
      doBadPreambleHandling(msg, new BadAuthException(msg));
      return PreambleResponse.CLOSE;
    }
    // TODO this is a wart while simple auth'n doesn't go through sasl.
    if (this.rpcServer.isSecurityEnabled && isSimpleAuthentication()) {
      if (this.rpcServer.allowFallbackToSimpleAuth) {
        this.rpcServer.metrics.authenticationFallback();
        authenticatedWithFallback = true;
      } else {
        AccessDeniedException ae = new AccessDeniedException("Authentication is required");
        doRespond(getErrorResponse(ae.getMessage(), ae));
        return PreambleResponse.CLOSE;
      }
    }
    if (!this.rpcServer.isSecurityEnabled && !isSimpleAuthentication()) {
      // client asked for SASL but server runs without security: tell it to switch to simple auth
      doRawSaslReply(SaslStatus.SUCCESS, new IntWritable(SaslUtil.SWITCH_TO_SIMPLE_AUTH), null,
        null);
      provider = saslProviders.getSimpleProvider();
      // client has already sent the initial Sasl message and we
      // should ignore it. Both client and server should fall back
      // to simple auth from now on.
      skipInitialSaslHandshake = true;
    }
    useSasl = !(provider instanceof SimpleSaslServerAuthenticationProvider);
    return PreambleResponse.SUCCEED;
  }
  /** Returns true if the selected provider is simple (non-SASL) auth; provider must be set. */
  boolean isSimpleAuthentication() {
    return Objects.requireNonNull(provider) instanceof SimpleSaslServerAuthenticationProvider;
  }

  /** Whether the underlying transport is still open; transport-specific. */
  public abstract boolean isConnectionOpen();

  /** Creates a transport-specific ServerCall for the parsed request. */
  public abstract ServerCall<?> createCall(int id, BlockingService service, MethodDescriptor md,
    RequestHeader header, Message param, ExtendedCellScanner cellScanner, long size,
    InetAddress remoteAddress, int timeout, CallCleanup reqCleanup);
private static class ByteBuffByteInput extends ByteInput {
private ByteBuff buf;
private int length;
ByteBuffByteInput(ByteBuff buf, int length) {
this.buf = buf;
this.length = length;
}
@Override
public byte read(int offset) {
return this.buf.get(offset);
}
@Override
public int read(int offset, byte[] out, int outOffset, int len) {
this.buf.get(offset, out, outOffset, len);
return len;
}
@Override
public int read(int offset, ByteBuffer out) {
int len = out.remaining();
this.buf.get(out, offset, len);
return len;
}
@Override
public int size() {
return this.length;
}
}
}
|
apache/lucene | 36,045 | lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestCompositeGeoPolygonRelationships.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.spatial3d.geom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.Test;
/**
* Check relationship between polygon and GeoShapes of composite polygons. Normally we construct the
* composite polygon (when possible) and the complex one.
*/
public class TestCompositeGeoPolygonRelationships extends LuceneTestCase {
  @Test
  public void testGeoCompositePolygon1() {
    // POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
    // -60.257713, 21 -61,19.845091 -60.452631))
    // NOTE(review): the code passes 19.84509 while the WKT comment says 19.845091 — presumably a
    // harmless truncation in either the comment or the literal; confirm against the intended WKT.
    GeoPolygon originalConvexPol =
      buildGeoPolygon(
        19.84509,
        -60.452631,
        20.119948,
        -61.655652,
        23.207901,
        -61.453298,
        22.820804,
        -60.257713,
        21,
        -61);

    // POLYGON ((19.845091 -60.452631, 21 -61,22.820804 -60.257713,23.207901 -61.453298, 20.119948
    // -61.655652, 19.845091 -60.452631))
    // Same ring as above with reversed winding, so it describes the complementary (concave) area.
    GeoPolygon originalConcavePol =
      buildGeoPolygon(
        19.84509,
        -60.452631,
        21,
        -61,
        22.820804,
        -60.257713,
        23.207901,
        -61.453298,
        20.119948,
        -61.655652);

    // Small probe shapes lying outside the convex polygon (hence inside the concave one).
    GeoPolygon polConvex =
      buildGeoPolygon(20.0, -60.4, 20.1, -60.4, 20.1, -60.3, 20.0, -60.3, 20.0, -60.3);

    GeoPolygon polConcave =
      buildConcaveGeoPolygon(20.0, -60.4, 20.1, -60.4, 20.1, -60.3, 20.0, -60.3);

    // convex
    int rel = originalConvexPol.getRelationship(polConvex);
    assertEquals(GeoArea.DISJOINT, rel);
    rel = polConvex.getRelationship(originalConvexPol);
    assertEquals(GeoArea.DISJOINT, rel);

    rel = originalConvexPol.getRelationship(polConcave);
    assertEquals(GeoArea.CONTAINS, rel);
    rel = polConcave.getRelationship(originalConvexPol);
    assertEquals(GeoArea.WITHIN, rel);

    // concave
    rel = originalConcavePol.getRelationship(polConvex);
    assertEquals(GeoArea.WITHIN, rel);
    rel = polConvex.getRelationship(originalConcavePol);
    assertEquals(GeoArea.CONTAINS, rel);

    rel = originalConcavePol.getRelationship(polConcave);
    assertEquals(GeoArea.OVERLAPS, rel);
    rel = polConcave.getRelationship(originalConcavePol);
    assertEquals(GeoArea.OVERLAPS, rel);
  }
@Test
public void testGeoCompositePolygon2() {
// POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
// -60.257713, 21 -61,19.845091 -60.452631))
GeoPolygon originalConvexPol =
buildGeoPolygon(
19.84509,
-60.452631,
20.119948,
-61.655652,
23.207901,
-61.453298,
22.820804,
-60.257713,
21,
-61);
// POLYGON ((19.845091 -60.452631, 21 -61,22.820804 -60.257713,23.207901 -61.453298, 20.119948
// -61.655652, 19.845091 -60.452631))
GeoPolygon originalConcavePol =
buildGeoPolygon(
19.84509,
-60.452631,
21,
-61,
22.820804,
-60.257713,
23.207901,
-61.453298,
20.119948,
-61.655652);
// POLYGON ((20.9 -60.8, 21.1 -60.8, 21.1 -60.6, 20.9 -60.6,20.9 -60.8))
GeoPolygon polConvex =
buildGeoPolygon(20.9, -60.8, 21.1, -60.8, 21.1, -60.6, 20.9, -60.6, 20.9, -60.6);
GeoPolygon polConcave =
buildConcaveGeoPolygon(20.9, -60.8, 21.1, -60.8, 21.1, -60.6, 20.9, -60.6);
// convex
int rel = originalConvexPol.getRelationship(polConvex);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConvex.getRelationship(originalConvexPol);
assertEquals(GeoArea.DISJOINT, rel);
rel = originalConvexPol.getRelationship(polConcave);
assertEquals(GeoArea.CONTAINS, rel);
rel = polConcave.getRelationship(originalConvexPol);
assertEquals(GeoArea.WITHIN, rel);
// concave
rel = originalConcavePol.getRelationship(polConvex);
assertEquals(GeoArea.WITHIN, rel);
rel = polConvex.getRelationship(originalConcavePol);
assertEquals(GeoArea.CONTAINS, rel);
rel = originalConcavePol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originalConcavePol);
assertEquals(GeoArea.OVERLAPS, rel);
}
@Test
public void testGeoCompositePolygon3() {
// POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
// -60.257713, 21 -61,19.845091 -60.452631))
GeoPolygon originalConvexPol =
buildGeoPolygon(
19.84509,
-60.452631,
20.119948,
-61.655652,
23.207901,
-61.453298,
22.820804,
-60.257713,
21,
-61);
// POLYGON ((19.845091 -60.452631, 21 -61,22.820804 -60.257713,23.207901 -61.453298, 20.119948
// -61.655652, 19.845091 -60.452631))
GeoPolygon originalConcavePol =
buildGeoPolygon(
19.84509,
-60.452631,
21,
-61,
22.820804,
-60.257713,
23.207901,
-61.453298,
20.119948,
-61.655652);
// POLYGON ((20.9 -61.1, 21.1 -61.1, 21.1 -60.9, 20.9 -60.9,20.9 -61.1))
GeoPolygon polConvex =
buildGeoPolygon(20.9, -61.1, 21.1, -61.1, 21.1, -60.9, 20.9, -60.9, 20.9, -60.9);
GeoPolygon polConcave =
buildConcaveGeoPolygon(20.9, -61.1, 21.1, -61.1, 21.1, -60.9, 20.9, -60.9);
// convex
int rel = originalConvexPol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(originalConvexPol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = originalConvexPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originalConvexPol);
assertEquals(GeoArea.OVERLAPS, rel);
// concave
rel = originalConcavePol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(originalConcavePol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = originalConcavePol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originalConcavePol);
assertEquals(GeoArea.OVERLAPS, rel);
}
@Test
public void testGeoCompositePolygon4() {
// POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
// -60.257713, 21 -61,19.845091 -60.452631))
GeoPolygon originalConvexPol =
buildGeoPolygon(
19.84509,
-60.452631,
20.119948,
-61.655652,
23.207901,
-61.453298,
22.820804,
-60.257713,
21,
-61);
// POLYGON ((19.845091 -60.452631, 21 -61,22.820804 -60.257713,23.207901 -61.453298, 20.119948
// -61.655652, 19.845091 -60.452631))
GeoPolygon originalConcavePol =
buildGeoPolygon(
19.84509,
-60.452631,
21,
-61,
22.820804,
-60.257713,
23.207901,
-61.453298,
20.119948,
-61.655652);
// POLYGON ((20.9 -61.4, 21.1 -61.4, 21.1 -61.2, 20.9 -61.2,20.9 -61.4))
GeoPolygon polConvex =
buildGeoPolygon(20.9, -61.4, 21.1, -61.4, 21.1, -61.2, 20.9, -61.2, 20.9, -61.2);
GeoPolygon polConcave =
buildConcaveGeoPolygon(20.9, -61.4, 21.1, -61.4, 21.1, -61.2, 20.9, -61.2);
// convex
int rel = originalConvexPol.getRelationship(polConvex);
assertEquals(GeoArea.WITHIN, rel);
rel = polConvex.getRelationship(originalConvexPol);
assertEquals(GeoArea.CONTAINS, rel);
rel = originalConvexPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originalConvexPol);
assertEquals(GeoArea.OVERLAPS, rel);
// concave
rel = originalConcavePol.getRelationship(polConvex);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConvex.getRelationship(originalConcavePol);
assertEquals(GeoArea.DISJOINT, rel);
rel = originalConcavePol.getRelationship(polConcave);
assertEquals(GeoArea.CONTAINS, rel);
rel = polConcave.getRelationship(originalConcavePol);
assertEquals(GeoArea.WITHIN, rel);
}
@Test
public void testGeoCompositePolygon5() {
// POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
// -60.257713, 21 -61,19.845091 -60.452631))
GeoPolygon originaConvexlPol =
buildGeoPolygon(
19.84509,
-60.452631,
20.119948,
-61.655652,
23.207901,
-61.453298,
22.820804,
-60.257713,
21,
-61);
// POLYGON ((19.845091 -60.452631, 21 -61,22.820804 -60.257713,23.207901 -61.453298, 20.119948
// -61.655652, 19.845091 -60.452631))
GeoPolygon originalConcavePol =
buildGeoPolygon(
19.84509,
-60.452631,
21,
-61,
22.820804,
-60.257713,
23.207901,
-61.453298,
20.119948,
-61.655652);
// POLYGON ((19 -62, 23 -62, 23 -60, 19 -60,19 -62))
GeoPolygon polConvex = buildGeoPolygon(19, -62, 23, -62, 23, -60, 19, -60, 19, -60);
GeoPolygon polConcave = buildConcaveGeoPolygon(19, -62, 23, -62, 23, -60, 19, -60);
// convex
int rel = originaConvexlPol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(originaConvexlPol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = originaConvexlPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originaConvexlPol);
assertEquals(GeoArea.OVERLAPS, rel);
// concave
rel = originalConcavePol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(originalConcavePol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = originalConcavePol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originalConcavePol);
assertEquals(GeoArea.OVERLAPS, rel);
}
@Test
public void testGeoCompositePolygon6() {
// POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
// -60.257713, 21 -61,19.845091 -60.452631))
GeoPolygon originalConvexPol =
buildGeoPolygon(
19.84509,
-60.452631,
20.119948,
-61.655652,
23.207901,
-61.453298,
22.820804,
-60.257713,
21,
-61);
// POLYGON ((19.845091 -60.452631, 21 -61,22.820804 -60.257713,23.207901 -61.453298, 20.119948
// -61.655652, 19.845091 -60.452631))
GeoPolygon originalConcavePol =
buildGeoPolygon(
19.84509,
-60.452631,
21,
-61,
22.820804,
-60.257713,
23.207901,
-61.453298,
20.119948,
-61.655652);
// POLYGON ((19 -62, 24 -62, 24 -60, 19 -60,19 -62))
GeoPolygon polConvex = buildGeoPolygon(19, -62, 24, -62, 24, -60, 19, -60, 19, -60);
GeoPolygon polConcave = buildConcaveGeoPolygon(19, -62, 24, -62, 24, -60, 19, -60);
// convex
int rel = originalConvexPol.getRelationship(polConvex);
assertEquals(GeoArea.CONTAINS, rel);
rel = polConvex.getRelationship(originalConvexPol);
assertEquals(GeoArea.WITHIN, rel);
rel = originalConvexPol.getRelationship(polConcave);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConcave.getRelationship(originalConvexPol);
assertEquals(GeoArea.DISJOINT, rel);
// concave
rel = originalConcavePol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(originalConcavePol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = originalConcavePol.getRelationship(polConcave);
assertEquals(GeoArea.WITHIN, rel);
rel = polConcave.getRelationship(originalConcavePol);
assertEquals(GeoArea.CONTAINS, rel);
}
@Test
public void testGeoCompositePolygon7() {
// POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
// -60.257713, 21 -61,19.845091 -60.452631))
GeoPolygon originalConvexPol =
buildGeoPolygon(
19.84509,
-60.452631,
20.119948,
-61.655652,
23.207901,
-61.453298,
22.820804,
-60.257713,
21,
-61);
// POLYGON ((19.845091 -60.452631, 21 -61,22.820804 -60.257713,23.207901 -61.453298, 20.119948
// -61.655652, 19.845091 -60.452631))
GeoPolygon originalConcavePol =
buildGeoPolygon(
19.84509,
-60.452631,
21,
-61,
22.820804,
-60.257713,
23.207901,
-61.453298,
20.119948,
-61.655652);
// POLYGON ((20.2 -61.4, 20.5 -61.4, 20.5 -60.8, 20.2 -60.8,20.2 -61.4))
GeoPolygon polConvex =
buildGeoPolygon(20.2, -61.4, 20.5, -61.4, 20.5, -60.8, 20.2, -60.8, 20.2, -60.8);
GeoPolygon polConcave =
buildConcaveGeoPolygon(20.2, -61.4, 20.5, -61.4, 20.5, -60.8, 20.2, -60.8);
// convex
int rel = originalConvexPol.getRelationship(polConvex);
assertEquals(GeoArea.WITHIN, rel);
rel = polConvex.getRelationship(originalConvexPol);
assertEquals(GeoArea.CONTAINS, rel);
rel = originalConvexPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originalConvexPol);
assertEquals(GeoArea.OVERLAPS, rel);
// concave
rel = originalConcavePol.getRelationship(polConvex);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConvex.getRelationship(originalConvexPol);
assertEquals(GeoArea.CONTAINS, rel);
rel = originalConvexPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(originalConvexPol);
assertEquals(GeoArea.OVERLAPS, rel);
}
@Test
public void testGeoCompositePolygon8() {
// POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804
// -60.257713,21 -61, 19.845091 -60.452631))
GeoPolygon originalPol =
buildGeoPolygon(
19.84509,
-60.452631,
20.119948,
-61.655652,
23.207901,
-61.453298,
22.820804,
-60.257713,
21,
-61);
GeoShape shape = getInsideCompositeShape();
int rel = originalPol.getRelationship(shape);
assertEquals(GeoArea.WITHIN, rel);
}
@Test
public void testGeoPolygonPole1() {
// POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80))
GeoPolygon compositePol = getCompositePolygon();
GeoPolygon complexPol = getComplexPolygon();
// POLYGON ((20.9 -61.4, 21.1 -61.4, 21.1 -61.2, 20.9 -61.2,20.9 -61.4))
GeoPolygon polConvex =
buildGeoPolygon(20.9, -61.4, 21.1, -61.4, 21.1, -61.2, 20.9, -61.2, 20.9, -61.2);
GeoPolygon polConcave =
buildConcaveGeoPolygon(20.9, -61.4, 21.1, -61.4, 21.1, -61.2, 20.9, -61.2);
int rel = compositePol.getRelationship(polConvex);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConvex.getRelationship(compositePol);
assertEquals(GeoArea.DISJOINT, rel);
rel = compositePol.getRelationship(polConcave);
assertEquals(GeoArea.CONTAINS, rel);
rel = polConcave.getRelationship(compositePol);
assertEquals(GeoArea.WITHIN, rel);
rel = complexPol.getRelationship(polConvex);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConvex.getRelationship(complexPol);
assertEquals(GeoArea.DISJOINT, rel);
rel = complexPol.getRelationship(polConcave);
assertEquals(GeoArea.CONTAINS, rel);
rel = polConcave.getRelationship(complexPol);
assertEquals(GeoArea.WITHIN, rel);
}
@Test
public void testGeoPolygonPole2() {
// POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80))
GeoPolygon compositePol = getCompositePolygon();
GeoPolygon complexPol = getComplexPolygon();
// POLYGON((-1 81, -1 79,1 79,1 81, -1 81))
GeoPolygon polConvex = buildGeoPolygon(-1, 81, -1, 79, 1, 79, 1, 81, 1, 81);
GeoPolygon polConcave = buildConcaveGeoPolygon(-1, 81, -1, 79, 1, 79, 1, 81);
int rel = compositePol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(compositePol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = compositePol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(compositePol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = complexPol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(complexPol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = complexPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(complexPol);
assertEquals(GeoArea.OVERLAPS, rel);
}
@Test
public void testGeoPolygonPole3() {
// POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80))
GeoPolygon compositePol = getCompositePolygon();
GeoPolygon complexPol = getComplexPolygon();
// POLYGON((-1 86, -1 84,1 84,1 86, -1 86))
GeoPolygon polConvex = buildGeoPolygon(-1, 86, -1, 84, 1, 84, 1, 86, 1, 86);
GeoPolygon polConcave = buildConcaveGeoPolygon(-1, 86, -1, 84, 1, 84, 1, 86);
int rel = compositePol.getRelationship(polConvex);
assertEquals(GeoArea.WITHIN, rel);
rel = polConvex.getRelationship(compositePol);
assertEquals(GeoArea.CONTAINS, rel);
rel = compositePol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(compositePol);
assertEquals(GeoArea.OVERLAPS, rel);
rel = complexPol.getRelationship(polConvex);
assertEquals(GeoArea.WITHIN, rel);
rel = polConvex.getRelationship(complexPol);
assertEquals(GeoArea.CONTAINS, rel);
rel = complexPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(complexPol);
assertEquals(GeoArea.OVERLAPS, rel);
}
@Test
public void testMultiPolygon1() {
// MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512
// -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)),
// ((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171,
// -145.918610092 -5.17512738429, -145.8563923 -5.17527125408)))
GeoPolygon multiPol = getMultiPolygon();
// POLYGON((-145.8555 -5.13, -145.8540 -5.13, -145.8540 -5.12, -145.8555 -5.12, -145.8555
// -5.13))
GeoPolygon polConvex =
buildGeoPolygon(
-145.8555, -5.13, -145.8540, -5.13, -145.8540, -5.12, -145.8555, -5.12, -145.8555,
-5.12);
GeoPolygon polConcave =
buildConcaveGeoPolygon(
-145.8555, -5.13, -145.8540, -5.13, -145.8540, -5.12, -145.8555, -5.12);
int rel = multiPol.getRelationship(polConvex);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConvex.getRelationship(multiPol);
assertEquals(GeoArea.DISJOINT, rel);
assertEquals(false, multiPol.intersects(polConvex));
assertEquals(false, polConvex.intersects(multiPol));
rel = multiPol.getRelationship(polConcave);
assertEquals(GeoArea.CONTAINS, rel);
rel = polConcave.getRelationship(multiPol);
assertEquals(GeoArea.WITHIN, rel);
assertEquals(false, multiPol.intersects(polConcave));
assertEquals(false, polConcave.intersects(multiPol));
}
@Test
public void testMultiPolygon2() {
// MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512
// -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)),
// ((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171,
// -145.918610092 -5.17512738429, -145.8563923 -5.17527125408)))
GeoPolygon multiPol = getMultiPolygon();
// POLYGON((-145.8555 -5.13, -145.85 -5.13, -145.85 -5.12, -145.8555 -5.12, -145.8555 -5.13))
GeoPolygon polConvex =
buildGeoPolygon(
-145.8555, -5.13, -145.85, -5.13, -145.85, -5.12, -145.8555, -5.12, -145.8555, -5.12);
GeoPolygon polConcave =
buildConcaveGeoPolygon(-145.8555, -5.13, -145.85, -5.13, -145.85, -5.12, -145.8555, -5.12);
int rel = multiPol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(multiPol);
assertEquals(GeoArea.OVERLAPS, rel);
assertEquals(true, multiPol.intersects(polConvex));
assertEquals(true, polConvex.intersects(multiPol));
rel = multiPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(multiPol);
assertEquals(GeoArea.OVERLAPS, rel);
assertEquals(true, multiPol.intersects(polConcave));
assertEquals(true, polConcave.intersects(multiPol));
}
@Test
public void testMultiPolygon3() {
// MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512
// -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)),
// ((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171,
// -145.918610092 -5.17512738429, -145.8563923 -5.17527125408)))
GeoPolygon multiPol = getMultiPolygon();
// POLYGON((-146 -5.18, -145.854 -5.18, -145.854 -5.11, -146 -5.11, -146 -5.18))
// Case overlapping one of the polygons so intersection is false!
GeoPolygon polConvex =
buildGeoPolygon(-146, -5.18, -145.854, -5.18, -145.854, -5.11, -146, -5.11, -146, -5.11);
GeoPolygon polConcave =
buildConcaveGeoPolygon(-146, -5.18, -145.854, -5.18, -145.854, -5.11, -146, -5.11);
int rel = multiPol.getRelationship(polConvex);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConvex.getRelationship(multiPol);
assertEquals(GeoArea.OVERLAPS, rel);
assertEquals(false, multiPol.intersects(polConvex));
assertEquals(false, polConvex.intersects(multiPol));
rel = multiPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(multiPol);
assertEquals(GeoArea.OVERLAPS, rel);
assertEquals(false, multiPol.intersects(polConcave));
assertEquals(false, polConcave.intersects(multiPol));
}
@Test
public void testMultiPolygon4() {
// MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512
// -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)),
// ((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171,
// -145.918610092 -5.17512738429, -145.8563923 -5.17527125408)))
GeoPolygon multiPol = getMultiPolygon();
// POLYGON((-145.88 -5.13, -145.87 -5.13, -145.87 -5.12, -145.88 -5.12, -145.88 -5.13))
GeoPolygon polConvex =
buildGeoPolygon(
-145.88, -5.13, -145.87, -5.13, -145.87, -5.12, -145.88, -5.12, -145.88, -5.12);
GeoPolygon polConcave =
buildConcaveGeoPolygon(-145.88, -5.13, -145.87, -5.13, -145.87, -5.12, -145.88, -5.12);
int rel = multiPol.getRelationship(polConvex);
assertEquals(GeoArea.WITHIN, rel);
rel = polConvex.getRelationship(multiPol);
assertEquals(GeoArea.CONTAINS, rel);
assertEquals(false, multiPol.intersects(polConvex));
assertEquals(false, polConvex.intersects(multiPol));
rel = multiPol.getRelationship(polConcave);
assertEquals(GeoArea.OVERLAPS, rel);
rel = polConcave.getRelationship(multiPol);
assertEquals(GeoArea.OVERLAPS, rel);
assertEquals(false, multiPol.intersects(polConcave));
assertEquals(false, polConcave.intersects(multiPol));
}
@Test
public void testMultiPolygon5() {
// MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512
// -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)),
// ((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171,
// -145.918610092 -5.17512738429, -145.8563923 -5.17527125408)))
GeoPolygon multiPol = getMultiPolygon();
// POLYGON((-146 -5.18, -145 -5.18, -145 -5.11, -146 -5.11, -146 -5.18))
GeoPolygon polConvex =
buildGeoPolygon(-146, -5.18, -145, -5.18, -145, -5.11, -146, -5.11, -146, -5.11);
GeoPolygon polConcave =
buildConcaveGeoPolygon(-146, -5.18, -145, -5.18, -145, -5.11, -146, -5.11);
int rel = multiPol.getRelationship(polConvex);
assertEquals(GeoArea.CONTAINS, rel);
rel = polConvex.getRelationship(multiPol);
assertEquals(GeoArea.WITHIN, rel);
assertEquals(false, multiPol.intersects(polConvex));
rel = multiPol.getRelationship(polConcave);
assertEquals(GeoArea.DISJOINT, rel);
rel = polConcave.getRelationship(multiPol);
assertEquals(GeoArea.DISJOINT, rel);
assertEquals(false, multiPol.intersects(polConcave));
}
private GeoPolygon buildGeoPolygon(
double lon1,
double lat1,
double lon2,
double lat2,
double lon3,
double lat3,
double lon4,
double lat4,
double lon5,
double lat5) {
GeoPoint point1 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1));
GeoPoint point2 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2));
GeoPoint point3 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3));
GeoPoint point4 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4));
GeoPoint point5 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat5), Geo3DUtil.fromDegrees(lon5));
final List<GeoPoint> points = new ArrayList<>();
points.add(point1);
points.add(point2);
points.add(point3);
points.add(point4);
points.add(point5);
return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points);
}
private GeoPolygon buildConcaveGeoPolygon(
double lon1,
double lat1,
double lon2,
double lat2,
double lon3,
double lat3,
double lon4,
double lat4) {
GeoPoint point1 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1));
GeoPoint point2 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2));
GeoPoint point3 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3));
GeoPoint point4 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4));
final List<GeoPoint> points = new ArrayList<>();
points.add(point1);
points.add(point2);
points.add(point3);
points.add(point4);
return GeoPolygonFactory.makeGeoConcavePolygon(PlanetModel.SPHERE, points);
}
private GeoPolygon getCompositePolygon() {
// POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80))
GeoPoint point1 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(0));
GeoPoint point2 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(45));
GeoPoint point3 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(90));
GeoPoint point4 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(135));
GeoPoint point5 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(180));
GeoPoint point6 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-135));
GeoPoint point7 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(-90));
GeoPoint point8 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-45));
final List<GeoPoint> points = new ArrayList<>();
points.add(point1);
points.add(point2);
points.add(point3);
points.add(point4);
points.add(point5);
points.add(point6);
points.add(point7);
points.add(point8);
return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points);
}
private GeoPolygon getComplexPolygon() {
// POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80))
GeoPoint point1 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(0));
GeoPoint point2 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(45));
GeoPoint point3 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(90));
GeoPoint point4 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(135));
GeoPoint point5 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(180));
GeoPoint point6 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-135));
GeoPoint point7 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(-90));
GeoPoint point8 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-45));
final List<GeoPoint> points = new ArrayList<>();
points.add(point1);
points.add(point2);
points.add(point3);
points.add(point4);
points.add(point5);
points.add(point6);
points.add(point7);
points.add(point8);
GeoPolygonFactory.PolygonDescription pd = new GeoPolygonFactory.PolygonDescription(points);
return GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.SPHERE, Collections.singletonList(pd));
}
private GeoPolygon getMultiPolygon() {
// MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512
// -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)),
// ((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171,
// -145.918610092 -5.17512738429, -145.8563923 -5.17527125408)))
GeoPoint point1 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.17543698881),
Geo3DUtil.fromDegrees(-145.790967486));
GeoPoint point2 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.11348060995),
Geo3DUtil.fromDegrees(-145.790854979));
GeoPoint point3 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.11339421216),
Geo3DUtil.fromDegrees(-145.853073512));
GeoPoint point4 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.17535061936),
Geo3DUtil.fromDegrees(-145.853192037));
final List<GeoPoint> points1 = new ArrayList<>();
points1.add(point1);
points1.add(point2);
points1.add(point3);
points1.add(point4);
GeoPolygonFactory.PolygonDescription pd1 = new GeoPolygonFactory.PolygonDescription(points1);
GeoPoint point5 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.17527125408),
Geo3DUtil.fromDegrees(-145.8563923));
GeoPoint point6 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.11332154814),
Geo3DUtil.fromDegrees(-145.856222168));
GeoPoint point7 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.11317773171),
Geo3DUtil.fromDegrees(-145.918433943));
GeoPoint point8 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-5.17512738429),
Geo3DUtil.fromDegrees(-145.918610092));
final List<GeoPoint> points2 = new ArrayList<>();
points2.add(point5);
points2.add(point6);
points2.add(point7);
points2.add(point8);
GeoPolygonFactory.PolygonDescription pd2 = new GeoPolygonFactory.PolygonDescription(points2);
final List<GeoPolygonFactory.PolygonDescription> pds = new ArrayList<>();
pds.add(pd1);
pds.add(pd2);
return GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.SPHERE, pds);
}
public GeoShape getInsideCompositeShape() {
// MULTIPOLYGON(((19.945091 -60.552631, 20.319948 -61.555652, 20.9 -61.5, 20.9 -61, 19.945091
// -60.552631)),
// ((21.1 -61.5, 23.107901 -61.253298, 22.720804 -60.457713,21.1 -61, 21.1 -61.5)))
GeoPoint point1 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-60.552631),
Geo3DUtil.fromDegrees(19.945091));
GeoPoint point2 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-61.555652),
Geo3DUtil.fromDegrees(20.319948));
GeoPoint point3 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.5), Geo3DUtil.fromDegrees(20.9));
GeoPoint point4 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(20.9));
final List<GeoPoint> points1 = new ArrayList<>();
points1.add(point1);
points1.add(point2);
points1.add(point3);
points1.add(point4);
GeoPoint point5 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.5), Geo3DUtil.fromDegrees(21.1));
GeoPoint point6 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-61.253298),
Geo3DUtil.fromDegrees(23.107901));
GeoPoint point7 =
new GeoPoint(
PlanetModel.SPHERE,
Geo3DUtil.fromDegrees(-60.457713),
Geo3DUtil.fromDegrees(22.720804));
GeoPoint point8 =
new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(21.1));
final List<GeoPoint> points2 = new ArrayList<>();
points2.add(point5);
points2.add(point6);
points2.add(point7);
points2.add(point8);
GeoPolygon p1 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points1);
GeoPolygon p2 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points2);
GeoCompositeMembershipShape compositeMembershipShape =
new GeoCompositeMembershipShape(PlanetModel.SPHERE);
compositeMembershipShape.addShape(p1);
compositeMembershipShape.addShape(p2);
return compositeMembershipShape;
}
}
|
apache/ranger | 35,854 | agents-audit/core/src/main/java/org/apache/ranger/audit/queue/AuditFileCacheProviderSpool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ranger.audit.queue;
import org.apache.ranger.audit.model.AuditEventBase;
import org.apache.ranger.audit.model.AuthzAuditEvent;
import org.apache.ranger.audit.provider.AuditHandler;
import org.apache.ranger.audit.provider.MiscUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
/**
* This class temporarily stores logs in Local file system before it despatches each logs in file to the AuditBatchQueue Consumer.
* This gets instantiated only when AuditFileCacheProvider is enabled (xasecure.audit.provider.filecache.is.enabled).
* When AuditFileCacheProvider is all the logs are stored in local file system before sent to destination.
*/
public class AuditFileCacheProviderSpool implements Runnable {
    private static final Logger logger = LoggerFactory.getLogger(AuditFileCacheProviderSpool.class);

    // Configuration property keys, resolved relative to the configured property prefix.
    public static final String PROP_FILE_SPOOL_LOCAL_DIR = "filespool.dir";
    public static final String PROP_FILE_SPOOL_LOCAL_FILE_NAME = "filespool.filename.format";
    public static final String PROP_FILE_SPOOL_ARCHIVE_DIR = "filespool.archive.dir";
    public static final String PROP_FILE_SPOOL_ARCHIVE_MAX_FILES_COUNT = "filespool.archive.max.files";
    public static final String PROP_FILE_SPOOL_FILENAME_PREFIX = "filespool.file.prefix";
    public static final String PROP_FILE_SPOOL_FILE_ROLLOVER = "filespool.file.rollover.sec";
    public static final String PROP_FILE_SPOOL_INDEX_FILE = "filespool.index.filename";
    public static final String PROP_FILE_SPOOL_DEST_RETRY_MS = "filespool.destination.retry.ms";
    public static final String PROP_FILE_SPOOL_BATCH_SIZE = "filespool.buffer.size";
    public static final String AUDIT_IS_FILE_CACHE_PROVIDER_ENABLE_PROP = "xasecure.audit.provider.filecache.is.enabled";
    public static final String FILE_CACHE_PROVIDER_NAME = "AuditFileCacheProviderSpool";

    // Downstream handler that receives spooled audit events.
    AuditHandler consumerProvider;
    // Queue of index records handed from the writer side to the consumer thread.
    BlockingQueue<AuditIndexRecord> indexQueue = new LinkedBlockingQueue<>();
    // In-memory view of the index file contents.
    List<AuditIndexRecord> indexRecords = new ArrayList<>();
    // Folder and File attributes
    File logFolder;
    String logFileNameFormat;
    File archiveFolder;
    String fileNamePrefix;
    String indexFileName;
    File indexFile;
    String indexDoneFileName;
    File indexDoneFile;
    // Timestamp (ms) of the last error logged, used to rate-limit error logging.
    long lastErrorLogMS;
    boolean isAuditFileCacheProviderEnabled;
    boolean closeFile;
    boolean isPending;
    long lastAttemptTime;
    boolean initDone;
    // Writer for the currently open spool file; null when no file is open.
    PrintWriter logWriter;
    AuditIndexRecord currentWriterIndexRecord;
    AuditIndexRecord currentConsumerIndexRecord;
    // Background thread that drains spool files to the destination.
    Thread destinationThread;
    boolean isDrain;
    boolean isDestDown;
    int retryDestinationMS = 30 * 1000; // Default 30 seconds
    int fileRolloverSec = 24 * 60 * 60; // In seconds
    int maxArchiveFiles = 100;
    int errorLogIntervalMS = 30 * 1000; // Every 30 seconds
    int auditBatchSize = 1000;
    boolean isWriting = true;
    boolean isSpoolingSuccessful = true;
public AuditFileCacheProviderSpool(AuditHandler consumerProvider) {
this.consumerProvider = consumerProvider;
}
    /**
     * Convenience overload of {@link #init(Properties, String)} using the default
     * property prefix. NOTE(review): the boolean result of the delegate is
     * discarded, so callers of this overload cannot detect initialization failure.
     */
    public void init(Properties prop) {
        init(prop, null);
    }
public boolean init(Properties props, String basePropertyName) {
logger.debug("==> AuditFileCacheProviderSpool.init()");
if (initDone) {
logger.error("init() called more than once. queueProvider=, consumerProvider={}", consumerProvider.getName());
return true;
}
String propPrefix = "xasecure.audit.filespool";
if (basePropertyName != null) {
propPrefix = basePropertyName;
}
try {
// Initial folder and file properties
String logFolderProp = MiscUtil.getStringProperty(props, propPrefix + "." + PROP_FILE_SPOOL_LOCAL_DIR);
String archiveFolderProp = MiscUtil.getStringProperty(props, propPrefix + "." + PROP_FILE_SPOOL_ARCHIVE_DIR);
logFileNameFormat = MiscUtil.getStringProperty(props, basePropertyName + "." + PROP_FILE_SPOOL_LOCAL_FILE_NAME);
fileNamePrefix = MiscUtil.getStringProperty(props, propPrefix + "." + PROP_FILE_SPOOL_FILENAME_PREFIX);
indexFileName = MiscUtil.getStringProperty(props, propPrefix + "." + PROP_FILE_SPOOL_INDEX_FILE);
retryDestinationMS = MiscUtil.getIntProperty(props, propPrefix + "." + PROP_FILE_SPOOL_DEST_RETRY_MS, retryDestinationMS);
fileRolloverSec = MiscUtil.getIntProperty(props, propPrefix + "." + PROP_FILE_SPOOL_FILE_ROLLOVER, fileRolloverSec);
maxArchiveFiles = MiscUtil.getIntProperty(props, propPrefix + "." + PROP_FILE_SPOOL_ARCHIVE_MAX_FILES_COUNT, maxArchiveFiles);
isAuditFileCacheProviderEnabled = MiscUtil.getBooleanProperty(props, AUDIT_IS_FILE_CACHE_PROVIDER_ENABLE_PROP, false);
logger.info("retryDestinationMS={}, queueName={}", retryDestinationMS, FILE_CACHE_PROVIDER_NAME);
logger.info("fileRolloverSec={}, queueName={}", fileRolloverSec, FILE_CACHE_PROVIDER_NAME);
logger.info("maxArchiveFiles={}, queueName={}", maxArchiveFiles, FILE_CACHE_PROVIDER_NAME);
if (logFolderProp == null || logFolderProp.isEmpty()) {
logger.error("Audit spool folder is not configured. Please set {}.{}.queueName={}", propPrefix, PROP_FILE_SPOOL_LOCAL_DIR, FILE_CACHE_PROVIDER_NAME);
return false;
}
logFolder = new File(logFolderProp);
if (!logFolder.isDirectory()) {
boolean result = logFolder.mkdirs();
if (!logFolder.isDirectory() || !result) {
logger.error("File Spool folder not found and can't be created. folder={}, queueName={}", logFolder.getAbsolutePath(), FILE_CACHE_PROVIDER_NAME);
return false;
}
}
logger.info("logFolder={}, queueName={}", logFolder, FILE_CACHE_PROVIDER_NAME);
if (logFileNameFormat == null || logFileNameFormat.isEmpty()) {
logFileNameFormat = "spool_" + "%app-type%" + "_" + "%time:yyyyMMdd-HHmm.ss%.log";
}
logger.info("logFileNameFormat={}, queueName={}", logFileNameFormat, FILE_CACHE_PROVIDER_NAME);
if (archiveFolderProp == null || archiveFolderProp.isEmpty()) {
archiveFolder = new File(logFolder, "archive");
} else {
archiveFolder = new File(archiveFolderProp);
}
if (!archiveFolder.isDirectory()) {
boolean result = archiveFolder.mkdirs();
if (!archiveFolder.isDirectory() || !result) {
logger.error("File Spool archive folder not found and can't be created. folder={}, queueName={}", archiveFolder.getAbsolutePath(), FILE_CACHE_PROVIDER_NAME);
return false;
}
}
logger.info("archiveFolder={}, queueName={}", archiveFolder, FILE_CACHE_PROVIDER_NAME);
if (indexFileName == null || indexFileName.isEmpty()) {
if (fileNamePrefix == null || fileNamePrefix.isEmpty()) {
fileNamePrefix = FILE_CACHE_PROVIDER_NAME + "_" + consumerProvider.getName();
}
indexFileName = "index_" + fileNamePrefix + "_" + "%app-type%" + ".json";
indexFileName = MiscUtil.replaceTokens(indexFileName, System.currentTimeMillis());
}
indexFile = new File(logFolder, indexFileName);
if (!indexFile.exists()) {
boolean ret = indexFile.createNewFile();
if (!ret) {
logger.error("Error creating index file. fileName={}", indexFile.getPath());
return false;
}
}
logger.info("indexFile={}, queueName={}", indexFile, FILE_CACHE_PROVIDER_NAME);
int lastDot = indexFileName.lastIndexOf('.');
if (lastDot < 0) {
lastDot = indexFileName.length() - 1;
}
indexDoneFileName = indexFileName.substring(0, lastDot) + "_closed.json";
indexDoneFile = new File(logFolder, indexDoneFileName);
if (!indexDoneFile.exists()) {
boolean ret = indexDoneFile.createNewFile();
if (!ret) {
logger.error("Error creating index done file. fileName={}", indexDoneFile.getPath());
return false;
}
}
logger.info("indexDoneFile={}, queueName={}", indexDoneFile, FILE_CACHE_PROVIDER_NAME);
// Load index file
loadIndexFile();
for (AuditIndexRecord auditIndexRecord : indexRecords) {
if (!auditIndexRecord.status.equals(SPOOL_FILE_STATUS.done)) {
isPending = true;
}
if (auditIndexRecord.status.equals(SPOOL_FILE_STATUS.write_inprogress)) {
currentWriterIndexRecord = auditIndexRecord;
logger.info("currentWriterIndexRecord={}, queueName={}", currentWriterIndexRecord.filePath, FILE_CACHE_PROVIDER_NAME);
}
if (auditIndexRecord.status.equals(SPOOL_FILE_STATUS.read_inprogress)) {
indexQueue.add(auditIndexRecord);
}
}
printIndex();
for (AuditIndexRecord auditIndexRecord : indexRecords) {
if (auditIndexRecord.status.equals(SPOOL_FILE_STATUS.pending)) {
File consumerFile = new File(auditIndexRecord.filePath);
if (!consumerFile.exists()) {
logger.error("INIT: Consumer file={} not found.", consumerFile.getPath());
} else {
indexQueue.add(auditIndexRecord);
}
}
}
} catch (Throwable t) {
logger.error("Error initializing File Spooler. queue={}", FILE_CACHE_PROVIDER_NAME, t);
return false;
}
auditBatchSize = MiscUtil.getIntProperty(props, propPrefix + "." + PROP_FILE_SPOOL_BATCH_SIZE, auditBatchSize);
initDone = true;
logger.debug("<== AuditFileCacheProviderSpool.init()");
return true;
}
/**
* Start looking for outstanding logs and update status according.
*/
public void start() {
if (!initDone) {
logger.error("Cannot start Audit File Spooler. Initilization not done yet. queueName={}", FILE_CACHE_PROVIDER_NAME);
return;
}
logger.info("Starting writerThread, queueName={}, consumer={}", FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
// Let's start the thread to read
destinationThread = new Thread(this, FILE_CACHE_PROVIDER_NAME + "_" + consumerProvider.getName() + "_destWriter");
destinationThread.setDaemon(true);
destinationThread.start();
}
    /**
     * Stops the spooler: sets the drain flag so the consumer loop exits, flushes and
     * (best-effort) closes the open spool file, then interrupts the consumer thread.
     */
    public void stop() {
        if (!initDone) {
            logger.error("Cannot stop Audit File Spooler. Initilization not done. queueName={}", FILE_CACHE_PROVIDER_NAME);
            return;
        }
        logger.info("Stop called, queueName={}, consumer={}", FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
        // Signal both the stash methods and the consumer loop to stop accepting work
        isDrain = true;
        flush();
        PrintWriter out = getOpenLogFileStream();
        if (out != null) {
            // If write is still going on, then let's give it enough time to
            // complete
            for (int i = 0; i < 3; i++) {
                if (isWriting) {
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                        // ignore
                    }
                    continue;
                }
                try {
                    logger.info("Closing open file, queueName={}, consumer={}", FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
                    out.flush();
                    out.close();
                    break;
                } catch (Throwable t) {
                    logger.debug("Error closing spool out file.", t);
                }
            }
        }
        try {
            // Wake the consumer thread out of poll()/sleep() so it can observe isDrain
            if (destinationThread != null) {
                destinationThread.interrupt();
            }
            destinationThread = null;
        } catch (Throwable e) {
            // ignore
        }
    }
public void flush() {
if (!initDone) {
logger.error("Cannot flush Audit File Spooler. Initilization not done. queueName={}", FILE_CACHE_PROVIDER_NAME);
return;
}
PrintWriter out = getOpenLogFileStream();
if (out != null) {
out.flush();
}
}
/**
* If any files are still not processed. Also, if the destination is not
* reachable
*
* @return
*/
public boolean isPending() {
if (!initDone) {
logError("isPending(): File Spooler not initialized. queueName=" + FILE_CACHE_PROVIDER_NAME);
return false;
}
return isPending;
}
/**
* Milliseconds from last attempt time
*
* @return
*/
public long getLastAttemptTimeDelta() {
if (lastAttemptTime == 0) {
return 0;
}
return System.currentTimeMillis() - lastAttemptTime;
}
    /**
     * Serializes one audit event to JSON and appends it to the active spool file.
     * Sets {@link #isPending} on success and records the outcome in
     * {@link #isSpoolingSuccessful} for callers to inspect.
     *
     * @param event audit event to spool; ignored (with an error log) after stop()
     */
    public synchronized void stashLogs(AuditEventBase event) {
        if (isDrain) {
            // Stop has been called, so this method shouldn't be called
            logger.error("stashLogs() is called after stop is called. event={}", event);
            return;
        }
        try {
            isWriting = true;
            PrintWriter logOut = getLogFileStream();
            String jsonStr = MiscUtil.stringify(event); // Convert event to json
            logOut.println(jsonStr);
            logOut.flush();
            isPending = true;
            isSpoolingSuccessful = true;
        } catch (Throwable t) {
            isSpoolingSuccessful = false;
            logger.error("Error writing to file. event={}", event, t);
        } finally {
            isWriting = false;
        }
    }
public synchronized void stashLogs(Collection<AuditEventBase> events) {
for (AuditEventBase event : events) {
stashLogs(event);
}
flush();
}
    /**
     * Appends an already-serialized audit event line to the active spool file.
     *
     * <p>NOTE(review): unlike {@link #stashLogs(AuditEventBase)} this does not flush
     * the writer, set {@link #isPending}, or update {@link #isSpoolingSuccessful} —
     * confirm whether that asymmetry is intended.
     *
     * @param event pre-serialized (JSON) audit event; ignored (with an error log) after stop()
     */
    public synchronized void stashLogsString(String event) {
        if (isDrain) {
            // Stop has been called, so this method shouldn't be called
            logger.error("stashLogs() is called after stop is called. event={}", event);
            return;
        }
        try {
            isWriting = true;
            PrintWriter logOut = getLogFileStream();
            logOut.println(event);
        } catch (Exception ex) {
            logger.error("Error writing to file. event={}", event, ex);
        } finally {
            isWriting = false;
        }
    }
    /** Returns whether the most recent {@link #stashLogs(AuditEventBase)} call wrote its event successfully. */
    public synchronized boolean isSpoolingSuccessful() {
        return isSpoolingSuccessful;
    }
public synchronized void stashLogsString(Collection<String> events) {
for (String event : events) {
stashLogsString(event);
}
flush();
}
/*
* (non-Javadoc)
*
* @see java.lang.Runnable#run()
*/
@Override
public void run() {
try {
//This is done to clear the MDC context to avoid issue with Ranger Auditing for Knox
MDC.clear();
runLogAudit();
} catch (Throwable t) {
logger.error("Exited thread without abnormaly. queue={}", consumerProvider.getName(), t);
}
}
    /**
     * Main consumer loop, run on {@link #destinationThread}: repeatedly takes the
     * next spool file from {@link #indexQueue}, replays its events in batches of
     * {@link #auditBatchSize} to the destination, and on completion marks the index
     * record done and archives the file. When delivery fails the destination is
     * marked down and the same file is retried after {@link #retryDestinationMS}.
     * The loop exits only when {@link #isDrain} is observed after a poll/sleep.
     */
    public void runLogAudit() {
        // boolean isResumed = false;
        while (true) {
            try {
                if (isDestDown) {
                    // Back off before retrying the same file
                    logger.info("Destination is down. sleeping for {} milli seconds. indexQueue={}, queueName={}, consumer={}", retryDestinationMS, indexQueue.size(), FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
                    Thread.sleep(retryDestinationMS);
                }
                // Let's pause between each iteration
                if (currentConsumerIndexRecord == null) {
                    currentConsumerIndexRecord = indexQueue.poll(retryDestinationMS, TimeUnit.MILLISECONDS);
                } else {
                    Thread.sleep(retryDestinationMS);
                }
                if (isDrain) {
                    // Need to exit
                    break;
                }
                if (currentConsumerIndexRecord == null) {
                    closeFileIfNeeded();
                    continue;
                }
                boolean isRemoveIndex = false;
                File consumerFile = new File(currentConsumerIndexRecord.filePath);
                if (!consumerFile.exists()) {
                    // Spool file vanished — drop its index entry so we don't loop on it
                    logger.error("Consumer file={} not found.", consumerFile.getPath());
                    printIndex();
                    isRemoveIndex = true;
                } else {
                    // Let's open the file to write
                    try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(currentConsumerIndexRecord.filePath), StandardCharsets.UTF_8))) {
                        // linePosition records how far a previous attempt got; skip those lines
                        int startLine = currentConsumerIndexRecord.linePosition;
                        int currLine = 0;
                        List<AuditEventBase> events = new ArrayList<>();
                        for (String line = br.readLine(); line != null; line = br.readLine()) {
                            currLine++;
                            if (currLine < startLine) {
                                continue;
                            }
                            AuditEventBase event = MiscUtil.fromJson(line, AuthzAuditEvent.class);
                            events.add(event);
                            if (events.size() == auditBatchSize) {
                                boolean ret = sendEvent(events, currentConsumerIndexRecord, currLine);
                                if (!ret) {
                                    throw new Exception("Destination down");
                                }
                                events.clear();
                            }
                        }
                        // Flush the final partial batch
                        if (!events.isEmpty()) {
                            boolean ret = sendEvent(events, currentConsumerIndexRecord, currLine);
                            if (!ret) {
                                throw new Exception("Destination down");
                            }
                            events.clear();
                        }
                        logger.info("Done reading file. file={}, queueName={}, consumer={}", currentConsumerIndexRecord.filePath, FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
                        // The entire file is read
                        currentConsumerIndexRecord.status = SPOOL_FILE_STATUS.done;
                        currentConsumerIndexRecord.doneCompleteTime = new Date();
                        currentConsumerIndexRecord.lastAttempt = true;
                        isRemoveIndex = true;
                    } catch (Exception ex) {
                        // Keep currentConsumerIndexRecord set so this file is retried
                        isDestDown = true;
                        logError("Destination down. queueName=" + FILE_CACHE_PROVIDER_NAME + ", consumer=" + consumerProvider.getName());
                        lastAttemptTime = System.currentTimeMillis();
                        // Update the index file
                        currentConsumerIndexRecord.lastFailedTime = new Date();
                        currentConsumerIndexRecord.failedAttemptCount++;
                        currentConsumerIndexRecord.lastAttempt = false;
                        saveIndexFile();
                    }
                }
                if (isRemoveIndex) {
                    // Remove this entry from index
                    removeIndexRecord(currentConsumerIndexRecord);
                    currentConsumerIndexRecord = null;
                    closeFileIfNeeded();
                }
            } catch (InterruptedException e) {
                // NOTE(review): the interrupt flag is not re-set here; the loop relies
                // on isDrain (set by stop() before interrupting) to actually exit.
                logger.info("Caught exception in consumer thread. Shutdown might be in progress");
            } catch (Throwable t) {
                logger.error("Exception in destination writing thread.", t);
            }
        }
        logger.info("Exiting file spooler. provider={}, consumer={}", FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
    }
/**
* Load the index file
*
* @throws IOException
*/
void loadIndexFile() throws IOException {
logger.info("Loading index file. fileName={}", indexFile.getPath());
try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(indexFile), StandardCharsets.UTF_8))) {
indexRecords.clear();
for (String line = br.readLine(); line != null; line = br.readLine()) {
if (!line.isEmpty() && !line.startsWith("#")) {
try {
AuditIndexRecord record = MiscUtil.fromJson(line, AuditIndexRecord.class);
indexRecords.add(record);
} catch (Exception e) {
logger.error("Error parsing following JSON: {}", line, e);
}
}
}
}
}
synchronized void printIndex() {
logger.info("INDEX printIndex() ==== START");
for (AuditIndexRecord record : indexRecords) {
logger.info("INDEX={}, isFileExist={}", record, (new File(record.filePath).exists()));
}
logger.info("INDEX printIndex() ==== END");
}
synchronized void removeIndexRecord(AuditIndexRecord indexRecord) throws IOException {
Iterator<AuditIndexRecord> iter = indexRecords.iterator();
while (iter.hasNext()) {
AuditIndexRecord record = iter.next();
if (record.id.equals(indexRecord.id)) {
logger.info("Removing file from index. file={}, queueName={}, consumer={}", record.filePath, FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
iter.remove();
appendToDoneFile(record);
}
}
saveIndexFile();
// If there are no more files in the index, then let's assume the destination is now available
if (indexRecords.isEmpty()) {
isPending = false;
}
}
synchronized void saveIndexFile() throws IOException {
PrintWriter out = new PrintWriter(indexFile, StandardCharsets.UTF_8.name());
for (AuditIndexRecord auditIndexRecord : indexRecords) {
out.println(MiscUtil.stringify(auditIndexRecord));
}
out.close();
// printIndex();
}
    /**
     * Records a fully-delivered spool file: appends its index record to the "done"
     * file, moves the spool file into the archive folder, and prunes the oldest
     * archived files when the archive exceeds {@link #maxArchiveFiles}.
     *
     * @param indexRecord index entry of the fully-consumed spool file
     * @throws IOException if the done file cannot be written
     */
    void appendToDoneFile(AuditIndexRecord indexRecord) throws IOException {
        logger.info("Moving to done file. {}, queueName={}, consumer={}", indexRecord.filePath, FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
        try (PrintWriter out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(indexDoneFile, true), StandardCharsets.UTF_8)))) {
            String line = MiscUtil.stringify(indexRecord);
            out.println(line);
            out.flush();
        }
        // After Each file is read and audit events are pushed into pipe, we flush to reach the destination immediate.
        consumerProvider.flush();
        // Move to archive folder
        File logFile = null;
        File archiveFile = null;
        try {
            logFile = new File(indexRecord.filePath);
            archiveFile = new File(archiveFolder, logFile.getName());
            logger.info("Moving logFile {} to {}", logFile, archiveFile);
            boolean result = logFile.renameTo(archiveFile);
            if (!result) {
                // Non-fatal: the spool file stays behind but delivery already succeeded
                logger.error("Error moving log file to archive folder. Unable to rename {} to archiveFile={}", logFile, archiveFile);
            }
        } catch (Throwable t) {
            logger.error("Error moving log file to archive folder. logFile={}, archiveFile={}", logFile, archiveFile, t);
        }
        // After archiving the file flush the pipe
        consumerProvider.flush();
        archiveFile = null;
        try {
            // Remove old files
            File[] logFiles = archiveFolder.listFiles(pathname -> pathname.getName().toLowerCase().endsWith(".log"));
            if (logFiles != null && logFiles.length > maxArchiveFiles) {
                int filesToDelete = logFiles.length - maxArchiveFiles;
                // The done file lists records oldest-first, so deletions start with the oldest archives
                try (BufferedReader br = new BufferedReader(new FileReader(indexDoneFile))) {
                    int filesDeletedCount = 0;
                    for (String line = br.readLine(); line != null; line = br.readLine()) {
                        if (!line.isEmpty() && !line.startsWith("#")) {
                            try {
                                AuditIndexRecord record = MiscUtil.fromJson(line, AuditIndexRecord.class);
                                if (record == null) {
                                    logger.warn("failed to parse index record: {}", line);
                                    continue;
                                }
                                logFile = new File(record.filePath);
                                archiveFile = new File(archiveFolder, logFile.getName());
                                if (archiveFile.exists()) {
                                    logger.info("Deleting archive file {}", archiveFile);
                                    boolean ret = archiveFile.delete();
                                    if (!ret) {
                                        logger.error("Error deleting archive file. archiveFile={}", archiveFile);
                                    }
                                    filesDeletedCount++;
                                    if (filesDeletedCount >= filesToDelete) {
                                        logger.info("Deleted {} files", filesDeletedCount);
                                        break;
                                    }
                                }
                            } catch (Exception e) {
                                logger.error("Error parsing following JSON: {}", line, e);
                            }
                        }
                    }
                }
            }
        } catch (Throwable t) {
            logger.error("Error deleting older archive file. archiveFile={}", archiveFile, t);
        }
    }
void logError(String msg) {
long currTimeMS = System.currentTimeMillis();
if (currTimeMS - lastErrorLogMS > errorLogIntervalMS) {
logger.error(msg);
lastErrorLogMS = currTimeMS;
}
}
    /**
     * Returns the writer for the currently open spool file, or null when no
     * spool file is open.
     */
    private synchronized PrintWriter getOpenLogFileStream() {
        return logWriter;
    }
/**
* @return
* @throws Exception
*/
private synchronized PrintWriter getLogFileStream() throws Exception {
closeFileIfNeeded();
// Either there are no open log file or the previous one has been rolled over
if (currentWriterIndexRecord == null) {
// Create a new file
Date currentTime = new Date();
String fileName = MiscUtil.replaceTokens(logFileNameFormat, currentTime.getTime());
String newFileName = fileName;
File outLogFile;
int i = 0;
while (true) {
outLogFile = new File(logFolder, newFileName);
File archiveLogFile = new File(archiveFolder, newFileName);
if (!outLogFile.exists() && !archiveLogFile.exists()) {
break;
}
i++;
int lastDot = fileName.lastIndexOf('.');
String baseName = fileName.substring(0, lastDot);
String extension = fileName.substring(lastDot);
newFileName = baseName + "." + i + extension;
}
fileName = newFileName;
logger.info("Creating new file. queueName={}, fileName={}", FILE_CACHE_PROVIDER_NAME, fileName);
// Open the file
logWriter = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outLogFile), StandardCharsets.UTF_8)));
AuditIndexRecord tmpIndexRecord = new AuditIndexRecord();
tmpIndexRecord.id = MiscUtil.generateUniqueId();
tmpIndexRecord.filePath = outLogFile.getPath();
tmpIndexRecord.status = SPOOL_FILE_STATUS.write_inprogress;
tmpIndexRecord.fileCreateTime = currentTime;
tmpIndexRecord.lastAttempt = true;
currentWriterIndexRecord = tmpIndexRecord;
indexRecords.add(currentWriterIndexRecord);
saveIndexFile();
} else {
if (logWriter == null) {
// This means the process just started. We need to open the file in append mode.
logger.info("Opening existing file for append. queueName={}, fileName={}", FILE_CACHE_PROVIDER_NAME, currentWriterIndexRecord.filePath);
logWriter = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(currentWriterIndexRecord.filePath, true), StandardCharsets.UTF_8)));
}
}
return logWriter;
}
    /**
     * Closes and enqueues the active spool file when it is due for rollover: the
     * writer is closed, the index record flips to {@code pending}, and the record
     * is handed to the consumer thread via {@link #indexQueue}.
     *
     * @throws IOException if the index file cannot be rewritten
     */
    private synchronized void closeFileIfNeeded() throws IOException {
        // Is there file open to write or there are no pending file, then close the active file
        if (currentWriterIndexRecord != null) {
            // Check whether the file needs to rolled
            rollOverSpoolFileByTime();
            if (closeFile) {
                // Roll the file
                if (logWriter != null) {
                    logWriter.flush();
                    logWriter.close();
                    logWriter = null;
                    closeFile = false;
                }
                currentWriterIndexRecord.status = SPOOL_FILE_STATUS.pending;
                currentWriterIndexRecord.writeCompleteTime = new Date();
                saveIndexFile();
                logger.info("Adding file to queue. queueName={}, fileName={}", FILE_CACHE_PROVIDER_NAME, currentWriterIndexRecord.filePath);
                indexQueue.add(currentWriterIndexRecord);
                currentWriterIndexRecord = null;
            }
        }
    }
private void rollOverSpoolFileByTime() {
if (System.currentTimeMillis() - currentWriterIndexRecord.fileCreateTime.getTime() > fileRolloverSec * 1000L) {
closeFile = true;
logger.info("Closing file. Rolling over. queueName={}, fileName={}", FILE_CACHE_PROVIDER_NAME, currentWriterIndexRecord.filePath);
}
}
private boolean sendEvent(List<AuditEventBase> events, AuditIndexRecord indexRecord, int currLine) {
boolean ret = true;
try {
ret = consumerProvider.log(events);
if (!ret) {
// Need to log error after fixed interval
logError("Error sending logs to consumer. provider=" + FILE_CACHE_PROVIDER_NAME + ", consumer=" + consumerProvider.getName());
} else {
// Update index and save
indexRecord.linePosition = currLine;
indexRecord.status = SPOOL_FILE_STATUS.read_inprogress;
indexRecord.lastSuccessTime = new Date();
indexRecord.lastAttempt = true;
saveIndexFile();
if (isDestDown) {
isDestDown = false;
logger.info("Destination up now. {}, queueName={}, consumer={}", indexRecord.filePath, FILE_CACHE_PROVIDER_NAME, consumerProvider.getName());
}
}
} catch (Throwable t) {
logger.error("Error while sending logs to consumer. provider={}, consumer={}, log={}", FILE_CACHE_PROVIDER_NAME, consumerProvider.getName(), events, t);
}
return ret;
}
public enum SPOOL_FILE_STATUS {
pending, write_inprogress, read_inprogress, done
}
    /**
     * One entry of the spool index file: tracks a single spool file's path,
     * delivery progress, lifecycle status and attempt timestamps. Serialized
     * to/from JSON by MiscUtil, so field names are part of the on-disk format.
     */
    static class AuditIndexRecord {
        String id;                  // unique id used to match records for removal
        String filePath;            // absolute path of the spool file
        int linePosition;           // last line successfully delivered to the destination
        SPOOL_FILE_STATUS status = SPOOL_FILE_STATUS.write_inprogress;
        Date fileCreateTime;
        Date writeCompleteTime;
        Date doneCompleteTime;
        Date lastSuccessTime;
        Date lastFailedTime;
        int failedAttemptCount;
        boolean lastAttempt;        // whether the most recent delivery attempt succeeded
        @Override
        public String toString() {
            return "AuditIndexRecord [id=" + id + ", filePath=" + filePath
                    + ", linePosition=" + linePosition + ", status=" + status
                    + ", fileCreateTime=" + fileCreateTime
                    + ", writeCompleteTime=" + writeCompleteTime
                    + ", doneCompleteTime=" + doneCompleteTime
                    + ", lastSuccessTime=" + lastSuccessTime
                    + ", lastFailedTime=" + lastFailedTime
                    + ", failedAttemptCount=" + failedAttemptCount
                    + ", lastAttempt=" + lastAttempt + "]";
        }
    }
}
// ==== file boundary (concatenation artifact): the following is a separate generated file:
// googleapis/google-cloud-java — java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/ListAttributesResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apihub/v1/apihub_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apihub.v1;
/**
*
*
* <pre>
* The [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] method's
* response.
* </pre>
*
* Protobuf type {@code google.cloud.apihub.v1.ListAttributesResponse}
*/
public final class ListAttributesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.ListAttributesResponse)
ListAttributesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAttributesResponse.newBuilder() to construct.
  // Builder-based constructor; instances are created only via newBuilder()/parseFrom.
  private ListAttributesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: initializes fields to proto3 defaults.
  private ListAttributesResponse() {
    attributes_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Reflection hook used by the protobuf runtime to create fresh instances.
    return new ListAttributesResponse();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apihub.v1.ApiHubServiceProto
.internal_static_google_cloud_apihub_v1_ListAttributesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apihub.v1.ApiHubServiceProto
.internal_static_google_cloud_apihub_v1_ListAttributesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apihub.v1.ListAttributesResponse.class,
com.google.cloud.apihub.v1.ListAttributesResponse.Builder.class);
}
public static final int ATTRIBUTES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.apihub.v1.Attribute> attributes_;
/**
*
*
* <pre>
* The list of all attributes.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.apihub.v1.Attribute> getAttributesList() {
return attributes_;
}
/**
*
*
* <pre>
* The list of all attributes.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.apihub.v1.AttributeOrBuilder>
getAttributesOrBuilderList() {
return attributes_;
}
/**
*
*
* <pre>
* The list of all attributes.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
*/
@java.lang.Override
public int getAttributesCount() {
return attributes_.size();
}
/**
*
*
* <pre>
* The list of all attributes.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apihub.v1.Attribute getAttributes(int index) {
return attributes_.get(index);
}
/**
*
*
* <pre>
* The list of all attributes.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apihub.v1.AttributeOrBuilder getAttributesOrBuilder(int index) {
return attributes_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field arrived from the wire as a ByteString; decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Field is cached as a String; encode once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // memoizedIsInitialized: -1 = not computed, 1 = initialized, 0 = not initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes attributes as repeated field 1, the page token as field 2 (if non-empty),
    // then any unknown fields preserved from parsing.
    for (int i = 0; i < attributes_.size(); i++) {
      output.writeMessage(1, attributes_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed (messages are immutable, so cache is safe).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < attributes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, attributes_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.apihub.v1.ListAttributesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.apihub.v1.ListAttributesResponse other =
        (com.google.cloud.apihub.v1.ListAttributesResponse) obj;
    // Field-by-field comparison, including unknown fields preserved from parsing.
    if (!getAttributesList().equals(other.getAttributesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 means not yet computed (messages are immutable, so cache is safe).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAttributesCount() > 0) {
      hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
      hash = (53 * hash) + getAttributesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parsing entry points: decode a ListAttributesResponse from raw bytes or streams.
  // All overloads delegate to PARSER; the stream variants route through the
  // parseWithIOException helpers so I/O failures surface as IOException rather than
  // InvalidProtocolBufferException.
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. newBuilder() seeds from the immutable default instance;
  // toBuilder() copies this message's current field values into a fresh Builder.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.apihub.v1.ListAttributesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty Builder without a needless mergeFrom pass.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The [ListAttributes][google.cloud.apihub.v1.ApiHub.ListAttributes] method's
   * response.
   * </pre>
   *
   * Protobuf type {@code google.cloud.apihub.v1.ListAttributesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.ListAttributesResponse)
      com.google.cloud.apihub.v1.ListAttributesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.apihub.v1.ApiHubServiceProto
          .internal_static_google_cloud_apihub_v1_ListAttributesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.apihub.v1.ApiHubServiceProto
          .internal_static_google_cloud_apihub_v1_ListAttributesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.apihub.v1.ListAttributesResponse.class,
              com.google.cloud.apihub.v1.ListAttributesResponse.Builder.class);
    }
    // Construct using com.google.cloud.apihub.v1.ListAttributesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      // Resets both fields to their defaults and clears all "has been set" bits.
      super.clear();
      bitField0_ = 0;
      if (attributesBuilder_ == null) {
        attributes_ = java.util.Collections.emptyList();
      } else {
        attributes_ = null;
        attributesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.apihub.v1.ApiHubServiceProto
          .internal_static_google_cloud_apihub_v1_ListAttributesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.apihub.v1.ListAttributesResponse getDefaultInstanceForType() {
      return com.google.cloud.apihub.v1.ListAttributesResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.apihub.v1.ListAttributesResponse build() {
      com.google.cloud.apihub.v1.ListAttributesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.apihub.v1.ListAttributesResponse buildPartial() {
      com.google.cloud.apihub.v1.ListAttributesResponse result =
          new com.google.cloud.apihub.v1.ListAttributesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated attributes field into the result; if the builder owns a
    // mutable list it is frozen (wrapped unmodifiable) and the ownership bit cleared.
    private void buildPartialRepeatedFields(
        com.google.cloud.apihub.v1.ListAttributesResponse result) {
      if (attributesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          attributes_ = java.util.Collections.unmodifiableList(attributes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.attributes_ = attributes_;
      } else {
        result.attributes_ = attributesBuilder_.build();
      }
    }
    // Copies singular fields into the result, gated on their bitField0_ "set" bits.
    private void buildPartial0(com.google.cloud.apihub.v1.ListAttributesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.apihub.v1.ListAttributesResponse) {
        return mergeFrom((com.google.cloud.apihub.v1.ListAttributesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.apihub.v1.ListAttributesResponse other) {
      if (other == com.google.cloud.apihub.v1.ListAttributesResponse.getDefaultInstance())
        return this;
      if (attributesBuilder_ == null) {
        if (!other.attributes_.isEmpty()) {
          if (attributes_.isEmpty()) {
            // Share the other message's (immutable) list rather than copying it.
            attributes_ = other.attributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAttributesIsMutable();
            attributes_.addAll(other.attributes_);
          }
          onChanged();
        }
      } else {
        if (!other.attributes_.isEmpty()) {
          if (attributesBuilder_.isEmpty()) {
            attributesBuilder_.dispose();
            attributesBuilder_ = null;
            attributes_ = other.attributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            attributesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAttributesFieldBuilder()
                    : null;
          } else {
            attributesBuilder_.addAllMessages(other.attributes_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.apihub.v1.Attribute m =
                    input.readMessage(
                        com.google.cloud.apihub.v1.Attribute.parser(), extensionRegistry);
                if (attributesBuilder_ == null) {
                  ensureAttributesIsMutable();
                  attributes_.add(m);
                } else {
                  attributesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0: attributes_ is a builder-owned mutable list; bit 1: nextPageToken_ was set.
    private int bitField0_;
    private java.util.List<com.google.cloud.apihub.v1.Attribute> attributes_ =
        java.util.Collections.emptyList();
    // Replaces attributes_ with a private ArrayList copy before the first in-place mutation.
    private void ensureAttributesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        attributes_ = new java.util.ArrayList<com.google.cloud.apihub.v1.Attribute>(attributes_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.apihub.v1.Attribute,
            com.google.cloud.apihub.v1.Attribute.Builder,
            com.google.cloud.apihub.v1.AttributeOrBuilder>
        attributesBuilder_;
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public java.util.List<com.google.cloud.apihub.v1.Attribute> getAttributesList() {
      if (attributesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(attributes_);
      } else {
        return attributesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public int getAttributesCount() {
      if (attributesBuilder_ == null) {
        return attributes_.size();
      } else {
        return attributesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public com.google.cloud.apihub.v1.Attribute getAttributes(int index) {
      if (attributesBuilder_ == null) {
        return attributes_.get(index);
      } else {
        return attributesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder setAttributes(int index, com.google.cloud.apihub.v1.Attribute value) {
      if (attributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAttributesIsMutable();
        attributes_.set(index, value);
        onChanged();
      } else {
        attributesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder setAttributes(
        int index, com.google.cloud.apihub.v1.Attribute.Builder builderForValue) {
      if (attributesBuilder_ == null) {
        ensureAttributesIsMutable();
        attributes_.set(index, builderForValue.build());
        onChanged();
      } else {
        attributesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder addAttributes(com.google.cloud.apihub.v1.Attribute value) {
      if (attributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAttributesIsMutable();
        attributes_.add(value);
        onChanged();
      } else {
        attributesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder addAttributes(int index, com.google.cloud.apihub.v1.Attribute value) {
      if (attributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAttributesIsMutable();
        attributes_.add(index, value);
        onChanged();
      } else {
        attributesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder addAttributes(com.google.cloud.apihub.v1.Attribute.Builder builderForValue) {
      if (attributesBuilder_ == null) {
        ensureAttributesIsMutable();
        attributes_.add(builderForValue.build());
        onChanged();
      } else {
        attributesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder addAttributes(
        int index, com.google.cloud.apihub.v1.Attribute.Builder builderForValue) {
      if (attributesBuilder_ == null) {
        ensureAttributesIsMutable();
        attributes_.add(index, builderForValue.build());
        onChanged();
      } else {
        attributesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder addAllAttributes(
        java.lang.Iterable<? extends com.google.cloud.apihub.v1.Attribute> values) {
      if (attributesBuilder_ == null) {
        ensureAttributesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, attributes_);
        onChanged();
      } else {
        attributesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder clearAttributes() {
      if (attributesBuilder_ == null) {
        attributes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        attributesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public Builder removeAttributes(int index) {
      if (attributesBuilder_ == null) {
        ensureAttributesIsMutable();
        attributes_.remove(index);
        onChanged();
      } else {
        attributesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public com.google.cloud.apihub.v1.Attribute.Builder getAttributesBuilder(int index) {
      return getAttributesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public com.google.cloud.apihub.v1.AttributeOrBuilder getAttributesOrBuilder(int index) {
      if (attributesBuilder_ == null) {
        return attributes_.get(index);
      } else {
        return attributesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.apihub.v1.AttributeOrBuilder>
        getAttributesOrBuilderList() {
      if (attributesBuilder_ != null) {
        return attributesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(attributes_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public com.google.cloud.apihub.v1.Attribute.Builder addAttributesBuilder() {
      return getAttributesFieldBuilder()
          .addBuilder(com.google.cloud.apihub.v1.Attribute.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public com.google.cloud.apihub.v1.Attribute.Builder addAttributesBuilder(int index) {
      return getAttributesFieldBuilder()
          .addBuilder(index, com.google.cloud.apihub.v1.Attribute.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of all attributes.
     * </pre>
     *
     * <code>repeated .google.cloud.apihub.v1.Attribute attributes = 1;</code>
     */
    public java.util.List<com.google.cloud.apihub.v1.Attribute.Builder> getAttributesBuilderList() {
      return getAttributesFieldBuilder().getBuilderList();
    }
    // Lazily creates the nested-builder view; once created, the field builder owns the
    // list and attributes_ is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.apihub.v1.Attribute,
            com.google.cloud.apihub.v1.Attribute.Builder,
            com.google.cloud.apihub.v1.AttributeOrBuilder>
        getAttributesFieldBuilder() {
      if (attributesBuilder_ == null) {
        attributesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.apihub.v1.Attribute,
                com.google.cloud.apihub.v1.Attribute.Builder,
                com.google.cloud.apihub.v1.AttributeOrBuilder>(
                attributes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        attributes_ = null;
      }
      return attributesBuilder_;
    }
    // Stored as String or ByteString; lazily converted and cached on access.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.ListAttributesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.ListAttributesResponse)
  // Singleton default (all-fields-empty) instance, created eagerly in the static initializer.
  private static final com.google.cloud.apihub.v1.ListAttributesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.ListAttributesResponse();
  }
  public static com.google.cloud.apihub.v1.ListAttributesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; on failure the partially-built message is
  // attached to the thrown InvalidProtocolBufferException via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<ListAttributesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListAttributesResponse>() {
        @java.lang.Override
        public ListAttributesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListAttributesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListAttributesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.apihub.v1.ListAttributesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2beta1;
import static com.google.cloud.dialogflow.v2beta1.DocumentsClient.ListDocumentsPagedResponse;
import static com.google.cloud.dialogflow.v2beta1.DocumentsClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.rpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class DocumentsClientTest {
  // Mock gRPC services shared across the test class (started once in @BeforeClass).
  private static MockDocuments mockDocuments;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test channel and client, recreated in setUp() and closed in tearDown().
  private LocalChannelProvider channelProvider;
  private DocumentsClient client;
  // Starts the in-process mock server hosting both mock services for the whole class.
  @BeforeClass
  public static void startStaticServer() {
    mockDocuments = new MockDocuments();
    mockLocations = new MockLocations();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockDocuments, mockLocations));
    mockServiceHelper.start();
  }
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
  // Resets mock state and builds a credential-free client bound to the local channel.
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    DocumentsSettings settings =
        DocumentsSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = DocumentsClient.create(settings);
  }
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  // listDocuments(KnowledgeBaseName): pages through one mocked response and verifies
  // the resource list, the request's parent, and the API client header.
  @Test
  public void listDocumentsTest() throws Exception {
    Document responsesElement = Document.newBuilder().build();
    ListDocumentsResponse expectedResponse =
        ListDocumentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDocuments(Arrays.asList(responsesElement))
            .build();
    mockDocuments.addResponse(expectedResponse);
    KnowledgeBaseName parent =
        KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
    ListDocumentsPagedResponse pagedListResponse = client.listDocuments(parent);
    List<Document> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDocumentsRequest actualRequest = ((ListDocumentsRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // listDocuments(KnowledgeBaseName): a mocked INVALID_ARGUMENT status must surface as
  // InvalidArgumentException.
  @Test
  public void listDocumentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      KnowledgeBaseName parent =
          KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
      client.listDocuments(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // listDocuments(String): same success scenario via the plain-string parent overload.
  @Test
  public void listDocumentsTest2() throws Exception {
    Document responsesElement = Document.newBuilder().build();
    ListDocumentsResponse expectedResponse =
        ListDocumentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDocuments(Arrays.asList(responsesElement))
            .build();
    mockDocuments.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListDocumentsPagedResponse pagedListResponse = client.listDocuments(parent);
    List<Document> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDocumentsRequest actualRequest = ((ListDocumentsRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // listDocuments(String): error propagation for the string-parent overload.
  @Test
  public void listDocumentsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      String parent = "parent-995424086";
      client.listDocuments(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getDocument(DocumentName): returns the mocked Document and sends the formatted name.
  @Test
  public void getDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    mockDocuments.addResponse(expectedResponse);
    DocumentName name =
        DocumentName.ofProjectKnowledgeBaseDocumentName(
            "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
    Document actualResponse = client.getDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDocumentRequest actualRequest = ((GetDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // getDocument(DocumentName): INVALID_ARGUMENT maps to InvalidArgumentException.
  @Test
  public void getDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      DocumentName name =
          DocumentName.ofProjectKnowledgeBaseDocumentName(
              "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
      client.getDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getDocument(String): same success scenario via the plain-string name overload.
  @Test
  public void getDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    mockDocuments.addResponse(expectedResponse);
    String name = "name3373707";
    Document actualResponse = client.getDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDocumentRequest actualRequest = ((GetDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // getDocument(String): error propagation for the string-name overload.
  @Test
  public void getDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      String name = "name3373707";
      client.getDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // createDocumentAsync(KnowledgeBaseName, Document): the mock returns a done Operation
  // wrapping the Document; the test unwraps it via .get() and checks the request fields.
  @Test
  public void createDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    KnowledgeBaseName parent =
        KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
    Document document = Document.newBuilder().build();
    Document actualResponse = client.createDocumentAsync(parent, document).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDocumentRequest actualRequest = ((CreateDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // createDocumentAsync: a gRPC error surfaces as ExecutionException from .get(),
  // with InvalidArgumentException as the cause.
  @Test
  public void createDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      KnowledgeBaseName parent =
          KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
      Document document = Document.newBuilder().build();
      client.createDocumentAsync(parent, document).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Same as createDocumentTest, but exercises the createDocumentAsync(String, Document)
  // overload that takes the parent as a raw resource-name string.
  @Test
  public void createDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    String parent = "parent-995424086";
    Document document = Document.newBuilder().build();
    Document actualResponse = client.createDocumentAsync(parent, document).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDocumentRequest actualRequest = ((CreateDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for the String-parent overload: INVALID_ARGUMENT must arrive as an
  // ExecutionException caused by InvalidArgumentException.
  @Test
  public void createDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      String parent = "parent-995424086";
      Document document = Document.newBuilder().build();
      client.createDocumentAsync(parent, document).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Verifies importDocumentsAsync(ImportDocumentsRequest): the mocked LRO completes with an
  // ImportDocumentsResponse, and the request fields (parent, gcsSource, documentTemplate,
  // importGcsCustomMetadata) are forwarded to the stub unchanged.
  @Test
  public void importDocumentsTest() throws Exception {
    ImportDocumentsResponse expectedResponse =
        ImportDocumentsResponse.newBuilder().addAllWarnings(new ArrayList<Status>()).build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("importDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    ImportDocumentsRequest request =
        ImportDocumentsRequest.newBuilder()
            .setParent(
                KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]")
                    .toString())
            .setDocumentTemplate(ImportDocumentTemplate.newBuilder().build())
            .setImportGcsCustomMetadata(true)
            .build();
    ImportDocumentsResponse actualResponse = client.importDocumentsAsync(request).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ImportDocumentsRequest actualRequest = ((ImportDocumentsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getParent(), actualRequest.getParent());
    // gcsSource is unset on both sides; the comparison pins the default-instance equality.
    Assert.assertEquals(request.getGcsSource(), actualRequest.getGcsSource());
    Assert.assertEquals(request.getDocumentTemplate(), actualRequest.getDocumentTemplate());
    Assert.assertEquals(
        request.getImportGcsCustomMetadata(), actualRequest.getImportGcsCustomMetadata());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path: INVALID_ARGUMENT from the stub must surface as an ExecutionException whose
  // cause is an InvalidArgumentException with the matching status code.
  @Test
  public void importDocumentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      ImportDocumentsRequest request =
          ImportDocumentsRequest.newBuilder()
              .setParent(
                  KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]")
                      .toString())
              .setDocumentTemplate(ImportDocumentTemplate.newBuilder().build())
              .setImportGcsCustomMetadata(true)
              .build();
      client.importDocumentsAsync(request).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Verifies deleteDocumentAsync(DocumentName): the LRO completes with Empty and the stub
  // receives a DeleteDocumentRequest carrying the formatted resource name.
  @Test
  public void deleteDocumentTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    DocumentName name =
        DocumentName.ofProjectKnowledgeBaseDocumentName(
            "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
    client.deleteDocumentAsync(name).get();
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDocumentRequest actualRequest = ((DeleteDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for deleteDocumentAsync(DocumentName): INVALID_ARGUMENT surfaces as an
  // ExecutionException caused by InvalidArgumentException.
  @Test
  public void deleteDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      DocumentName name =
          DocumentName.ofProjectKnowledgeBaseDocumentName(
              "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
      client.deleteDocumentAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
@Test
public void deleteDocumentTest2() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
Operation resultOperation =
Operation.newBuilder()
.setName("deleteDocumentTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockDocuments.addResponse(resultOperation);
String name = "name3373707";
client.deleteDocumentAsync(name).get();
List<AbstractMessage> actualRequests = mockDocuments.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteDocumentRequest actualRequest = ((DeleteDocumentRequest) actualRequests.get(0));
Assert.assertEquals(name, actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
  // Error path for deleteDocumentAsync(String): INVALID_ARGUMENT surfaces as an
  // ExecutionException caused by InvalidArgumentException.
  @Test
  public void deleteDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      String name = "name3373707";
      client.deleteDocumentAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Verifies updateDocumentAsync(Document): the mocked LRO completes with the expected
  // Document and the stub receives an UpdateDocumentRequest carrying the document payload.
  @Test
  public void updateDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    Document document = Document.newBuilder().build();
    Document actualResponse = client.updateDocumentAsync(document).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateDocumentRequest actualRequest = ((UpdateDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for updateDocumentAsync(Document): INVALID_ARGUMENT surfaces as an
  // ExecutionException caused by InvalidArgumentException.
  @Test
  public void updateDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      Document document = Document.newBuilder().build();
      client.updateDocumentAsync(document).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Same as updateDocumentTest, but exercises the updateDocumentAsync(Document, FieldMask)
  // overload and additionally pins the forwarded update mask.
  @Test
  public void updateDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    Document document = Document.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    Document actualResponse = client.updateDocumentAsync(document, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateDocumentRequest actualRequest = ((UpdateDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for the (Document, FieldMask) overload.
  @Test
  public void updateDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      Document document = Document.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateDocumentAsync(document, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Verifies reloadDocumentAsync(DocumentName, GcsSource): the mocked LRO completes with the
  // expected Document and the stub receives a ReloadDocumentRequest with the formatted name
  // and the GCS source.
  @Test
  public void reloadDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("reloadDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    DocumentName name =
        DocumentName.ofProjectKnowledgeBaseDocumentName(
            "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
    GcsSource gcsSource = GcsSource.newBuilder().build();
    Document actualResponse = client.reloadDocumentAsync(name, gcsSource).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ReloadDocumentRequest actualRequest = ((ReloadDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertEquals(gcsSource, actualRequest.getGcsSource());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for reloadDocumentAsync(DocumentName, GcsSource).
  @Test
  public void reloadDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      DocumentName name =
          DocumentName.ofProjectKnowledgeBaseDocumentName(
              "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
      GcsSource gcsSource = GcsSource.newBuilder().build();
      client.reloadDocumentAsync(name, gcsSource).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Same as reloadDocumentTest, but exercises the reloadDocumentAsync(String, GcsSource)
  // overload that takes the document name as a raw string.
  @Test
  public void reloadDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("reloadDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);
    String name = "name3373707";
    GcsSource gcsSource = GcsSource.newBuilder().build();
    Document actualResponse = client.reloadDocumentAsync(name, gcsSource).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ReloadDocumentRequest actualRequest = ((ReloadDocumentRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertEquals(gcsSource, actualRequest.getGcsSource());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for reloadDocumentAsync(String, GcsSource).
  @Test
  public void reloadDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);
    try {
      String name = "name3373707";
      GcsSource gcsSource = GcsSource.newBuilder().build();
      client.reloadDocumentAsync(name, gcsSource).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Verifies listLocations: a single-page response (empty next_page_token) yields exactly one
  // resource through the paged iterator, and all request fields reach the mock unchanged.
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path: listLocations is synchronous, so INVALID_ARGUMENT is thrown directly as an
  // InvalidArgumentException (no ExecutionException wrapper).
  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Verifies getLocation: the mocked Location is returned as-is and the GetLocationRequest
  // name is forwarded to the stub.
  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void getLocationExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLocations.addException(exception);
try {
GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
client.getLocation(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
|
googleapis/google-cloud-java | 35,834 | java-meet/proto-google-cloud-meet-v2/src/main/java/com/google/apps/meet/v2/ListTranscriptsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/apps/meet/v2/service.proto
// Protobuf Java Version: 3.25.8
package com.google.apps.meet.v2;
/**
*
*
* <pre>
* Response for ListTranscripts method.
* </pre>
*
* Protobuf type {@code google.apps.meet.v2.ListTranscriptsResponse}
*/
public final class ListTranscriptsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.apps.meet.v2.ListTranscriptsResponse)
ListTranscriptsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListTranscriptsResponse.newBuilder() to construct.
  private ListTranscriptsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used when parsing; initializes fields to their proto3 defaults.
  private ListTranscriptsResponse() {
    transcripts_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListTranscriptsResponse();
  }
  // Message descriptor for google.apps.meet.v2.ListTranscriptsResponse.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.apps.meet.v2.ServiceProto
        .internal_static_google_apps_meet_v2_ListTranscriptsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.apps.meet.v2.ServiceProto
        .internal_static_google_apps_meet_v2_ListTranscriptsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.apps.meet.v2.ListTranscriptsResponse.class,
            com.google.apps.meet.v2.ListTranscriptsResponse.Builder.class);
  }
  public static final int TRANSCRIPTS_FIELD_NUMBER = 1;
  // Immutable backing list for the repeated `transcripts` field (field number 1).
  @SuppressWarnings("serial")
  private java.util.List<com.google.apps.meet.v2.Transcript> transcripts_;
  /**
   *
   *
   * <pre>
   * List of transcripts in one page.
   * </pre>
   *
   * <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.apps.meet.v2.Transcript> getTranscriptsList() {
    return transcripts_;
  }
  /**
   *
   *
   * <pre>
   * List of transcripts in one page.
   * </pre>
   *
   * <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.apps.meet.v2.TranscriptOrBuilder>
      getTranscriptsOrBuilderList() {
    return transcripts_;
  }
  /**
   *
   *
   * <pre>
   * List of transcripts in one page.
   * </pre>
   *
   * <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
   */
  @java.lang.Override
  public int getTranscriptsCount() {
    return transcripts_.size();
  }
  /**
   *
   *
   * <pre>
   * List of transcripts in one page.
   * </pre>
   *
   * <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
   */
  @java.lang.Override
  public com.google.apps.meet.v2.Transcript getTranscripts(int index) {
    return transcripts_.get(index);
  }
  /**
   *
   *
   * <pre>
   * List of transcripts in one page.
   * </pre>
   *
   * <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
   */
  @java.lang.Override
  public com.google.apps.meet.v2.TranscriptOrBuilder getTranscriptsOrBuilder(int index) {
    return transcripts_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; decoded lazily and cached by the accessors below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Token to be circulated back for further List call if current List doesn't
   * include all the transcripts. Unset if all transcripts are returned.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access after parsing: decode UTF-8 bytes once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Token to be circulated back for further List call if current List doesn't
   * include all the transcripts. Unset if all transcripts are returned.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the cached String once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes transcripts (field 1), next_page_token (field 2), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < transcripts_.size(); i++) {
      output.writeMessage(1, transcripts_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size; must mirror writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < transcripts_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, transcripts_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields, per the protobuf message contract.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.apps.meet.v2.ListTranscriptsResponse)) {
      return super.equals(obj);
    }
    com.google.apps.meet.v2.ListTranscriptsResponse other =
        (com.google.apps.meet.v2.ListTranscriptsResponse) obj;
    if (!getTranscriptsList().equals(other.getTranscriptsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor and set fields; memoized since the message is immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getTranscriptsCount() > 0) {
      hash = (37 * hash) + TRANSCRIPTS_FIELD_NUMBER;
      hash = (53 * hash) + getTranscriptsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parsing entry points; each overload delegates to PARSER (optionally
  // with an ExtensionRegistry) for the corresponding input type.
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.apps.meet.v2.ListTranscriptsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builders come from the default instance's toBuilder().
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.apps.meet.v2.ListTranscriptsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response for ListTranscripts method.
* </pre>
*
* Protobuf type {@code google.apps.meet.v2.ListTranscriptsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.apps.meet.v2.ListTranscriptsResponse)
com.google.apps.meet.v2.ListTranscriptsResponseOrBuilder {
    // Same descriptor and accessor table as the message class; required by the builder base.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.apps.meet.v2.ServiceProto
          .internal_static_google_apps_meet_v2_ListTranscriptsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.apps.meet.v2.ServiceProto
          .internal_static_google_apps_meet_v2_ListTranscriptsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.apps.meet.v2.ListTranscriptsResponse.class,
              com.google.apps.meet.v2.ListTranscriptsResponse.Builder.class);
    }
    // Construct using com.google.apps.meet.v2.ListTranscriptsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (transcriptsBuilder_ == null) {
transcripts_ = java.util.Collections.emptyList();
} else {
transcripts_ = null;
transcriptsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.apps.meet.v2.ServiceProto
.internal_static_google_apps_meet_v2_ListTranscriptsResponse_descriptor;
}
@java.lang.Override
public com.google.apps.meet.v2.ListTranscriptsResponse getDefaultInstanceForType() {
return com.google.apps.meet.v2.ListTranscriptsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.apps.meet.v2.ListTranscriptsResponse build() {
com.google.apps.meet.v2.ListTranscriptsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.apps.meet.v2.ListTranscriptsResponse buildPartial() {
com.google.apps.meet.v2.ListTranscriptsResponse result =
new com.google.apps.meet.v2.ListTranscriptsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.apps.meet.v2.ListTranscriptsResponse result) {
if (transcriptsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
transcripts_ = java.util.Collections.unmodifiableList(transcripts_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.transcripts_ = transcripts_;
} else {
result.transcripts_ = transcriptsBuilder_.build();
}
}
private void buildPartial0(com.google.apps.meet.v2.ListTranscriptsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.apps.meet.v2.ListTranscriptsResponse) {
return mergeFrom((com.google.apps.meet.v2.ListTranscriptsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.apps.meet.v2.ListTranscriptsResponse other) {
if (other == com.google.apps.meet.v2.ListTranscriptsResponse.getDefaultInstance())
return this;
if (transcriptsBuilder_ == null) {
if (!other.transcripts_.isEmpty()) {
if (transcripts_.isEmpty()) {
transcripts_ = other.transcripts_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTranscriptsIsMutable();
transcripts_.addAll(other.transcripts_);
}
onChanged();
}
} else {
if (!other.transcripts_.isEmpty()) {
if (transcriptsBuilder_.isEmpty()) {
transcriptsBuilder_.dispose();
transcriptsBuilder_ = null;
transcripts_ = other.transcripts_;
bitField0_ = (bitField0_ & ~0x00000001);
transcriptsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getTranscriptsFieldBuilder()
: null;
} else {
transcriptsBuilder_.addAllMessages(other.transcripts_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.apps.meet.v2.Transcript m =
input.readMessage(
com.google.apps.meet.v2.Transcript.parser(), extensionRegistry);
if (transcriptsBuilder_ == null) {
ensureTranscriptsIsMutable();
transcripts_.add(m);
} else {
transcriptsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.apps.meet.v2.Transcript> transcripts_ =
java.util.Collections.emptyList();
private void ensureTranscriptsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
transcripts_ = new java.util.ArrayList<com.google.apps.meet.v2.Transcript>(transcripts_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.apps.meet.v2.Transcript,
com.google.apps.meet.v2.Transcript.Builder,
com.google.apps.meet.v2.TranscriptOrBuilder>
transcriptsBuilder_;
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public java.util.List<com.google.apps.meet.v2.Transcript> getTranscriptsList() {
if (transcriptsBuilder_ == null) {
return java.util.Collections.unmodifiableList(transcripts_);
} else {
return transcriptsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public int getTranscriptsCount() {
if (transcriptsBuilder_ == null) {
return transcripts_.size();
} else {
return transcriptsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public com.google.apps.meet.v2.Transcript getTranscripts(int index) {
if (transcriptsBuilder_ == null) {
return transcripts_.get(index);
} else {
return transcriptsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder setTranscripts(int index, com.google.apps.meet.v2.Transcript value) {
if (transcriptsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTranscriptsIsMutable();
transcripts_.set(index, value);
onChanged();
} else {
transcriptsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder setTranscripts(
int index, com.google.apps.meet.v2.Transcript.Builder builderForValue) {
if (transcriptsBuilder_ == null) {
ensureTranscriptsIsMutable();
transcripts_.set(index, builderForValue.build());
onChanged();
} else {
transcriptsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder addTranscripts(com.google.apps.meet.v2.Transcript value) {
if (transcriptsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTranscriptsIsMutable();
transcripts_.add(value);
onChanged();
} else {
transcriptsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder addTranscripts(int index, com.google.apps.meet.v2.Transcript value) {
if (transcriptsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTranscriptsIsMutable();
transcripts_.add(index, value);
onChanged();
} else {
transcriptsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder addTranscripts(com.google.apps.meet.v2.Transcript.Builder builderForValue) {
if (transcriptsBuilder_ == null) {
ensureTranscriptsIsMutable();
transcripts_.add(builderForValue.build());
onChanged();
} else {
transcriptsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder addTranscripts(
int index, com.google.apps.meet.v2.Transcript.Builder builderForValue) {
if (transcriptsBuilder_ == null) {
ensureTranscriptsIsMutable();
transcripts_.add(index, builderForValue.build());
onChanged();
} else {
transcriptsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder addAllTranscripts(
java.lang.Iterable<? extends com.google.apps.meet.v2.Transcript> values) {
if (transcriptsBuilder_ == null) {
ensureTranscriptsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, transcripts_);
onChanged();
} else {
transcriptsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder clearTranscripts() {
if (transcriptsBuilder_ == null) {
transcripts_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
transcriptsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public Builder removeTranscripts(int index) {
if (transcriptsBuilder_ == null) {
ensureTranscriptsIsMutable();
transcripts_.remove(index);
onChanged();
} else {
transcriptsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public com.google.apps.meet.v2.Transcript.Builder getTranscriptsBuilder(int index) {
return getTranscriptsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public com.google.apps.meet.v2.TranscriptOrBuilder getTranscriptsOrBuilder(int index) {
if (transcriptsBuilder_ == null) {
return transcripts_.get(index);
} else {
return transcriptsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public java.util.List<? extends com.google.apps.meet.v2.TranscriptOrBuilder>
getTranscriptsOrBuilderList() {
if (transcriptsBuilder_ != null) {
return transcriptsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(transcripts_);
}
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public com.google.apps.meet.v2.Transcript.Builder addTranscriptsBuilder() {
return getTranscriptsFieldBuilder()
.addBuilder(com.google.apps.meet.v2.Transcript.getDefaultInstance());
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public com.google.apps.meet.v2.Transcript.Builder addTranscriptsBuilder(int index) {
return getTranscriptsFieldBuilder()
.addBuilder(index, com.google.apps.meet.v2.Transcript.getDefaultInstance());
}
/**
*
*
* <pre>
* List of transcripts in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Transcript transcripts = 1;</code>
*/
public java.util.List<com.google.apps.meet.v2.Transcript.Builder> getTranscriptsBuilderList() {
return getTranscriptsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.apps.meet.v2.Transcript,
com.google.apps.meet.v2.Transcript.Builder,
com.google.apps.meet.v2.TranscriptOrBuilder>
getTranscriptsFieldBuilder() {
if (transcriptsBuilder_ == null) {
transcriptsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.apps.meet.v2.Transcript,
com.google.apps.meet.v2.Transcript.Builder,
com.google.apps.meet.v2.TranscriptOrBuilder>(
transcripts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
transcripts_ = null;
}
return transcriptsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcripts. Unset if all transcripts are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcripts. Unset if all transcripts are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcripts. Unset if all transcripts are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcripts. Unset if all transcripts are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcripts. Unset if all transcripts are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.apps.meet.v2.ListTranscriptsResponse)
}
// @@protoc_insertion_point(class_scope:google.apps.meet.v2.ListTranscriptsResponse)
  // Singleton empty instance; created eagerly at class-initialization time.
  private static final com.google.apps.meet.v2.ListTranscriptsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.apps.meet.v2.ListTranscriptsResponse();
  }
  /** Returns the singleton default (empty) instance of {@code ListTranscriptsResponse}. */
  public static com.google.apps.meet.v2.ListTranscriptsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to the Builder's stream mergeFrom; on any failure it attaches the
  // partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListTranscriptsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListTranscriptsResponse>() {
        @java.lang.Override
        public ListTranscriptsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for {@code ListTranscriptsResponse}. */
  public static com.google.protobuf.Parser<ListTranscriptsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListTranscriptsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.apps.meet.v2.ListTranscriptsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.language.v2;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.language.v2.stub.LanguageServiceStub;
import com.google.cloud.language.v2.stub.LanguageServiceStubSettings;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Provides text analysis operations such as sentiment analysis and entity
* recognition.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* AnalyzeSentimentResponse response = languageServiceClient.analyzeSentiment(document);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the LanguageServiceClient object to clean up resources
* such as threads. In the example above, try-with-resources is used, which automatically calls
* close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> AnalyzeSentiment</td>
* <td><p> Analyzes the sentiment of the provided text.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> analyzeSentiment(AnalyzeSentimentRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> analyzeSentiment(Document document)
* <li><p> analyzeSentiment(Document document, EncodingType encodingType)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> analyzeSentimentCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> AnalyzeEntities</td>
* <td><p> Finds named entities (currently proper names and common nouns) in the text along with entity types, probability, mentions for each entity, and other properties.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> analyzeEntities(AnalyzeEntitiesRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> analyzeEntities(Document document)
* <li><p> analyzeEntities(Document document, EncodingType encodingType)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> analyzeEntitiesCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ClassifyText</td>
* <td><p> Classifies a document into categories.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> classifyText(ClassifyTextRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> classifyText(Document document)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> classifyTextCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ModerateText</td>
* <td><p> Moderates a document for harmful and sensitive categories.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> moderateText(ModerateTextRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> moderateText(Document document)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> moderateTextCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> AnnotateText</td>
* <td><p> A convenience method that provides all features in one call.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> annotateText(AnnotateTextRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> annotateText(Document document, AnnotateTextRequest.Features features)
* <li><p> annotateText(Document document, AnnotateTextRequest.Features features, EncodingType encodingType)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> annotateTextCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of LanguageServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LanguageServiceSettings languageServiceSettings =
* LanguageServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* LanguageServiceClient languageServiceClient =
* LanguageServiceClient.create(languageServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LanguageServiceSettings languageServiceSettings =
* LanguageServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* LanguageServiceClient languageServiceClient =
* LanguageServiceClient.create(languageServiceSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LanguageServiceSettings languageServiceSettings =
* LanguageServiceSettings.newHttpJsonBuilder().build();
* LanguageServiceClient languageServiceClient =
* LanguageServiceClient.create(languageServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class LanguageServiceClient implements BackgroundResource {
private final LanguageServiceSettings settings;
private final LanguageServiceStub stub;
  /**
   * Constructs an instance of LanguageServiceClient with default settings.
   *
   * @throws IOException if the underlying transport channels cannot be created
   */
  public static final LanguageServiceClient create() throws IOException {
    return create(LanguageServiceSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of LanguageServiceClient, using the given settings. The channels are
   * created based on the settings passed in, or defaults for any settings that are not set.
   *
   * @throws IOException if the underlying transport channels cannot be created
   */
  public static final LanguageServiceClient create(LanguageServiceSettings settings)
      throws IOException {
    return new LanguageServiceClient(settings);
  }
  /**
   * Constructs an instance of LanguageServiceClient, using the given stub for making calls. This is
   * for advanced usage - prefer using create(LanguageServiceSettings). Note that a client created
   * this way retains no settings ({@link #getSettings()} returns {@code null}).
   */
  public static final LanguageServiceClient create(LanguageServiceStub stub) {
    return new LanguageServiceClient(stub);
  }
  /**
   * Constructs an instance of LanguageServiceClient, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected LanguageServiceClient(LanguageServiceSettings settings) throws IOException {
    this.settings = settings;
    // Derive the transport stub from the stub-level settings embedded in the client settings.
    this.stub = ((LanguageServiceStubSettings) settings.getStubSettings()).createStub();
  }
  /** Constructs an instance directly from a stub; no settings are retained in this case. */
  protected LanguageServiceClient(LanguageServiceStub stub) {
    this.settings = null;
    this.stub = stub;
  }
  /**
   * Returns the settings this client was created with, or {@code null} if it was constructed
   * directly from a stub.
   */
  public final LanguageServiceSettings getSettings() {
    return settings;
  }
  /** Returns the transport stub backing this client. */
  public LanguageServiceStub getStub() {
    return stub;
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Analyzes the sentiment of the provided text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* AnalyzeSentimentResponse response = languageServiceClient.analyzeSentiment(document);
* }
* }</pre>
*
* @param document Required. Input document.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AnalyzeSentimentResponse analyzeSentiment(Document document) {
AnalyzeSentimentRequest request =
AnalyzeSentimentRequest.newBuilder().setDocument(document).build();
return analyzeSentiment(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Analyzes the sentiment of the provided text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* EncodingType encodingType = EncodingType.forNumber(0);
* AnalyzeSentimentResponse response =
* languageServiceClient.analyzeSentiment(document, encodingType);
* }
* }</pre>
*
* @param document Required. Input document.
* @param encodingType The encoding type used by the API to calculate sentence offsets.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AnalyzeSentimentResponse analyzeSentiment(
Document document, EncodingType encodingType) {
AnalyzeSentimentRequest request =
AnalyzeSentimentRequest.newBuilder()
.setDocument(document)
.setEncodingType(encodingType)
.build();
return analyzeSentiment(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Analyzes the sentiment of the provided text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* AnalyzeSentimentRequest request =
* AnalyzeSentimentRequest.newBuilder()
* .setDocument(Document.newBuilder().build())
* .setEncodingType(EncodingType.forNumber(0))
* .build();
* AnalyzeSentimentResponse response = languageServiceClient.analyzeSentiment(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
  public final AnalyzeSentimentResponse analyzeSentiment(AnalyzeSentimentRequest request) {
    // Blocking call: issues the RPC through the stub's unary callable.
    return analyzeSentimentCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Analyzes the sentiment of the provided text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* AnalyzeSentimentRequest request =
* AnalyzeSentimentRequest.newBuilder()
* .setDocument(Document.newBuilder().build())
* .setEncodingType(EncodingType.forNumber(0))
* .build();
* ApiFuture<AnalyzeSentimentResponse> future =
* languageServiceClient.analyzeSentimentCallable().futureCall(request);
* // Do something.
* AnalyzeSentimentResponse response = future.get();
* }
* }</pre>
*/
  public final UnaryCallable<AnalyzeSentimentRequest, AnalyzeSentimentResponse>
      analyzeSentimentCallable() {
    // Exposes the stub's callable directly, e.g. for async futureCall usage.
    return stub.analyzeSentimentCallable();
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Finds named entities (currently proper names and common nouns) in the text along with entity
* types, probability, mentions for each entity, and other properties.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* AnalyzeEntitiesResponse response = languageServiceClient.analyzeEntities(document);
* }
* }</pre>
*
* @param document Required. Input document.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AnalyzeEntitiesResponse analyzeEntities(Document document) {
AnalyzeEntitiesRequest request =
AnalyzeEntitiesRequest.newBuilder().setDocument(document).build();
return analyzeEntities(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Finds named entities (currently proper names and common nouns) in the text along with entity
* types, probability, mentions for each entity, and other properties.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* EncodingType encodingType = EncodingType.forNumber(0);
* AnalyzeEntitiesResponse response =
* languageServiceClient.analyzeEntities(document, encodingType);
* }
* }</pre>
*
* @param document Required. Input document.
* @param encodingType The encoding type used by the API to calculate offsets.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AnalyzeEntitiesResponse analyzeEntities(
Document document, EncodingType encodingType) {
AnalyzeEntitiesRequest request =
AnalyzeEntitiesRequest.newBuilder()
.setDocument(document)
.setEncodingType(encodingType)
.build();
return analyzeEntities(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Finds named entities (currently proper names and common nouns) in the text along with entity
* types, probability, mentions for each entity, and other properties.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* AnalyzeEntitiesRequest request =
* AnalyzeEntitiesRequest.newBuilder()
* .setDocument(Document.newBuilder().build())
* .setEncodingType(EncodingType.forNumber(0))
* .build();
* AnalyzeEntitiesResponse response = languageServiceClient.analyzeEntities(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
  public final AnalyzeEntitiesResponse analyzeEntities(AnalyzeEntitiesRequest request) {
    // Blocking call: issues the RPC through the stub's unary callable.
    return analyzeEntitiesCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Finds named entities (currently proper names and common nouns) in the text along with entity
* types, probability, mentions for each entity, and other properties.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* AnalyzeEntitiesRequest request =
* AnalyzeEntitiesRequest.newBuilder()
* .setDocument(Document.newBuilder().build())
* .setEncodingType(EncodingType.forNumber(0))
* .build();
* ApiFuture<AnalyzeEntitiesResponse> future =
* languageServiceClient.analyzeEntitiesCallable().futureCall(request);
* // Do something.
* AnalyzeEntitiesResponse response = future.get();
* }
* }</pre>
*/
  public final UnaryCallable<AnalyzeEntitiesRequest, AnalyzeEntitiesResponse>
      analyzeEntitiesCallable() {
    // Exposes the stub's callable directly, e.g. for async futureCall usage.
    return stub.analyzeEntitiesCallable();
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Classifies a document into categories.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* ClassifyTextResponse response = languageServiceClient.classifyText(document);
* }
* }</pre>
*
* @param document Required. Input document.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ClassifyTextResponse classifyText(Document document) {
ClassifyTextRequest request = ClassifyTextRequest.newBuilder().setDocument(document).build();
return classifyText(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Classifies a document into categories.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* ClassifyTextRequest request =
* ClassifyTextRequest.newBuilder().setDocument(Document.newBuilder().build()).build();
* ClassifyTextResponse response = languageServiceClient.classifyText(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
  public final ClassifyTextResponse classifyText(ClassifyTextRequest request) {
    // Blocking call: issues the RPC through the stub's unary callable.
    return classifyTextCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Classifies a document into categories.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* ClassifyTextRequest request =
* ClassifyTextRequest.newBuilder().setDocument(Document.newBuilder().build()).build();
* ApiFuture<ClassifyTextResponse> future =
* languageServiceClient.classifyTextCallable().futureCall(request);
* // Do something.
* ClassifyTextResponse response = future.get();
* }
* }</pre>
*/
  public final UnaryCallable<ClassifyTextRequest, ClassifyTextResponse> classifyTextCallable() {
    // Exposes the stub's callable directly, e.g. for async futureCall usage.
    return stub.classifyTextCallable();
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Moderates a document for harmful and sensitive categories.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* ModerateTextResponse response = languageServiceClient.moderateText(document);
* }
* }</pre>
*
* @param document Required. Input document.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ModerateTextResponse moderateText(Document document) {
ModerateTextRequest request = ModerateTextRequest.newBuilder().setDocument(document).build();
return moderateText(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Moderates a document for harmful and sensitive categories.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* ModerateTextRequest request =
* ModerateTextRequest.newBuilder().setDocument(Document.newBuilder().build()).build();
* ModerateTextResponse response = languageServiceClient.moderateText(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
  public final ModerateTextResponse moderateText(ModerateTextRequest request) {
    // Blocking call: issues the RPC through the stub's unary callable.
    return moderateTextCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Moderates a document for harmful and sensitive categories.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* ModerateTextRequest request =
* ModerateTextRequest.newBuilder().setDocument(Document.newBuilder().build()).build();
* ApiFuture<ModerateTextResponse> future =
* languageServiceClient.moderateTextCallable().futureCall(request);
* // Do something.
* ModerateTextResponse response = future.get();
* }
* }</pre>
*/
  public final UnaryCallable<ModerateTextRequest, ModerateTextResponse> moderateTextCallable() {
    // Exposes the stub's callable directly, e.g. for async futureCall usage.
    return stub.moderateTextCallable();
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* A convenience method that provides all features in one call.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* AnnotateTextRequest.Features features = AnnotateTextRequest.Features.newBuilder().build();
* AnnotateTextResponse response = languageServiceClient.annotateText(document, features);
* }
* }</pre>
*
* @param document Required. Input document.
* @param features Required. The enabled features.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AnnotateTextResponse annotateText(
Document document, AnnotateTextRequest.Features features) {
AnnotateTextRequest request =
AnnotateTextRequest.newBuilder().setDocument(document).setFeatures(features).build();
return annotateText(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* A convenience method that provides all features in one call.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* Document document = Document.newBuilder().build();
* AnnotateTextRequest.Features features = AnnotateTextRequest.Features.newBuilder().build();
* EncodingType encodingType = EncodingType.forNumber(0);
* AnnotateTextResponse response =
* languageServiceClient.annotateText(document, features, encodingType);
* }
* }</pre>
*
* @param document Required. Input document.
* @param features Required. The enabled features.
* @param encodingType The encoding type used by the API to calculate offsets.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AnnotateTextResponse annotateText(
Document document, AnnotateTextRequest.Features features, EncodingType encodingType) {
AnnotateTextRequest request =
AnnotateTextRequest.newBuilder()
.setDocument(document)
.setFeatures(features)
.setEncodingType(encodingType)
.build();
return annotateText(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* A convenience method that provides all features in one call.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* AnnotateTextRequest request =
* AnnotateTextRequest.newBuilder()
* .setDocument(Document.newBuilder().build())
* .setFeatures(AnnotateTextRequest.Features.newBuilder().build())
* .setEncodingType(EncodingType.forNumber(0))
* .build();
* AnnotateTextResponse response = languageServiceClient.annotateText(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
  public final AnnotateTextResponse annotateText(AnnotateTextRequest request) {
    // Blocking call: issues the RPC through the stub's unary callable.
    return annotateTextCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* A convenience method that provides all features in one call.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) {
* AnnotateTextRequest request =
* AnnotateTextRequest.newBuilder()
* .setDocument(Document.newBuilder().build())
* .setFeatures(AnnotateTextRequest.Features.newBuilder().build())
* .setEncodingType(EncodingType.forNumber(0))
* .build();
* ApiFuture<AnnotateTextResponse> future =
* languageServiceClient.annotateTextCallable().futureCall(request);
* // Do something.
* AnnotateTextResponse response = future.get();
* }
* }</pre>
*/
  public final UnaryCallable<AnnotateTextRequest, AnnotateTextResponse> annotateTextCallable() {
    // Exposes the stub's callable directly, e.g. for async futureCall usage.
    return stub.annotateTextCallable();
  }
  // Lifecycle management: every method below delegates to the underlying stub,
  // which owns the transport channels and executor resources.
  @Override
  public final void close() {
    stub.close();
  }
  @Override
  public void shutdown() {
    // Begins an orderly shutdown: in-flight calls may complete, new calls are rejected.
    stub.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }
  @Override
  public void shutdownNow() {
    // More aggressive than shutdown(): also attempts to cancel in-flight calls.
    stub.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    // Blocks until termination completes or the timeout elapses.
    return stub.awaitTermination(duration, unit);
  }
}
|
googleapis/google-cloud-java | 35,733 | java-dialogflow/google-cloud-dialogflow/src/test/java/com/google/cloud/dialogflow/v2/DocumentsClientHttpJsonTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2;
import static com.google.cloud.dialogflow.v2.DocumentsClient.ListDocumentsPagedResponse;
import static com.google.cloud.dialogflow.v2.DocumentsClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.cloud.dialogflow.v2.stub.HttpJsonDocumentsStub;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.rpc.Status;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class DocumentsClientHttpJsonTest {
  private static MockHttpService mockService;  // fake HTTP transport shared by all tests
  private static DocumentsClient client;  // client under test, wired to mockService
  // Builds a DocumentsClient whose HTTP/JSON transport is backed by the mock
  // service, with credentials disabled, once for the whole test class.
  @BeforeClass
  public static void startStaticServer() throws IOException {
    mockService =
        new MockHttpService(
            HttpJsonDocumentsStub.getMethodDescriptors(), DocumentsSettings.getDefaultEndpoint());
    DocumentsSettings settings =
        DocumentsSettings.newHttpJsonBuilder()
            .setTransportChannelProvider(
                DocumentsSettings.defaultHttpJsonTransportProviderBuilder()
                    .setHttpTransport(mockService)
                    .build())
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = DocumentsClient.create(settings);
  }
  // Releases the shared client after all tests have run.
  @AfterClass
  public static void stopServer() {
    client.close();
  }
  @Before
  public void setUp() {}
  // Clears staged responses/exceptions so tests stay independent.
  @After
  public void tearDown() throws Exception {
    mockService.reset();
  }
@Test
public void listDocumentsTest() throws Exception {
Document responsesElement = Document.newBuilder().build();
ListDocumentsResponse expectedResponse =
ListDocumentsResponse.newBuilder()
.setNextPageToken("")
.addAllDocuments(Arrays.asList(responsesElement))
.build();
mockService.addResponse(expectedResponse);
KnowledgeBaseName parent =
KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
ListDocumentsPagedResponse pagedListResponse = client.listDocuments(parent);
List<Document> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void listDocumentsExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
KnowledgeBaseName parent =
KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
client.listDocuments(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  // Same as listDocumentsTest but exercises the String-parent overload.
  @Test
  public void listDocumentsTest2() throws Exception {
    Document responsesElement = Document.newBuilder().build();
    ListDocumentsResponse expectedResponse =
        ListDocumentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDocuments(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);
    String parent = "projects/project-8161/knowledgeBases/knowledgeBase-8161";
    ListDocumentsPagedResponse pagedListResponse = client.listDocuments(parent);
    // The paged response must surface exactly the staged document.
    List<Document> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));
    // Exactly one HTTP request, carrying the standard API client header.
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies the String-parent overload surfaces INVALID_ARGUMENT as
  // InvalidArgumentException.
  @Test
  public void listDocumentsExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String parent = "projects/project-8161/knowledgeBases/knowledgeBase-8161";
      client.listDocuments(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Stages a fully-populated Document and verifies getDocument (DocumentName
  // overload) returns it unchanged over the HTTP/JSON transport.
  @Test
  public void getDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    mockService.addResponse(expectedResponse);
    DocumentName name =
        DocumentName.ofProjectKnowledgeBaseDocumentName(
            "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
    Document actualResponse = client.getDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    // Exactly one HTTP request, carrying the standard API client header.
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies getDocument (DocumentName overload) surfaces INVALID_ARGUMENT as
  // InvalidArgumentException.
  @Test
  public void getDocumentExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      DocumentName name =
          DocumentName.ofProjectKnowledgeBaseDocumentName(
              "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
      client.getDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same as getDocumentTest but exercises the String-name overload.
  @Test
  public void getDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    mockService.addResponse(expectedResponse);
    String name = "projects/project-5854/knowledgeBases/knowledgeBase-5854/documents/document-5854";
    Document actualResponse = client.getDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    // Exactly one HTTP request, carrying the standard API client header.
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies the String-name overload surfaces INVALID_ARGUMENT as
  // InvalidArgumentException.
  @Test
  public void getDocumentExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String name =
          "projects/project-5854/knowledgeBases/knowledgeBase-5854/documents/document-5854";
      client.getDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Stages a completed long-running Operation wrapping a Document and verifies
  // createDocumentAsync (KnowledgeBaseName overload) unwraps it correctly.
  @Test
  public void createDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockService.addResponse(resultOperation);
    KnowledgeBaseName parent =
        KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
    Document document = Document.newBuilder().build();
    Document actualResponse = client.createDocumentAsync(parent, document).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    // Exactly one HTTP request, carrying the standard API client header.
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
@Test
public void createDocumentExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
KnowledgeBaseName parent =
KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
Document document = Document.newBuilder().build();
client.createDocumentAsync(parent, document).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
  // Same as createDocumentTest but exercises the String-parent overload.
  @Test
  public void createDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockService.addResponse(resultOperation);
    String parent = "projects/project-8161/knowledgeBases/knowledgeBase-8161";
    Document document = Document.newBuilder().build();
    Document actualResponse = client.createDocumentAsync(parent, document).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    // Exactly one HTTP request, carrying the standard API client header.
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
@Test
public void createDocumentExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String parent = "projects/project-8161/knowledgeBases/knowledgeBase-8161";
Document document = Document.newBuilder().build();
client.createDocumentAsync(parent, document).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void importDocumentsTest() throws Exception {
ImportDocumentsResponse expectedResponse =
ImportDocumentsResponse.newBuilder().addAllWarnings(new ArrayList<Status>()).build();
Operation resultOperation =
Operation.newBuilder()
.setName("importDocumentsTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockService.addResponse(resultOperation);
ImportDocumentsRequest request =
ImportDocumentsRequest.newBuilder()
.setParent(
KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]")
.toString())
.setDocumentTemplate(ImportDocumentTemplate.newBuilder().build())
.setImportGcsCustomMetadata(true)
.build();
ImportDocumentsResponse actualResponse = client.importDocumentsAsync(request).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void importDocumentsExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
ImportDocumentsRequest request =
ImportDocumentsRequest.newBuilder()
.setParent(
KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]")
.toString())
.setDocumentTemplate(ImportDocumentTemplate.newBuilder().build())
.setImportGcsCustomMetadata(true)
.build();
client.importDocumentsAsync(request).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void deleteDocumentTest() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
Operation resultOperation =
Operation.newBuilder()
.setName("deleteDocumentTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockService.addResponse(resultOperation);
DocumentName name =
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
client.deleteDocumentAsync(name).get();
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void deleteDocumentExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
DocumentName name =
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
client.deleteDocumentAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void deleteDocumentTest2() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
Operation resultOperation =
Operation.newBuilder()
.setName("deleteDocumentTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockService.addResponse(resultOperation);
String name = "projects/project-5854/knowledgeBases/knowledgeBase-5854/documents/document-5854";
client.deleteDocumentAsync(name).get();
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void deleteDocumentExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String name =
"projects/project-5854/knowledgeBases/knowledgeBase-5854/documents/document-5854";
client.deleteDocumentAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void updateDocumentTest() throws Exception {
Document expectedResponse =
Document.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setDisplayName("displayName1714148973")
.setMimeType("mimeType-1392120434")
.addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
.setEnableAutoReload(true)
.setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
.putAllMetadata(new HashMap<String, String>())
.build();
Operation resultOperation =
Operation.newBuilder()
.setName("updateDocumentTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockService.addResponse(resultOperation);
Document document =
Document.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setDisplayName("displayName1714148973")
.setMimeType("mimeType-1392120434")
.addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
.setEnableAutoReload(true)
.setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
.putAllMetadata(new HashMap<String, String>())
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
Document actualResponse = client.updateDocumentAsync(document, updateMask).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void updateDocumentExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
Document document =
Document.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setDisplayName("displayName1714148973")
.setMimeType("mimeType-1392120434")
.addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
.setEnableAutoReload(true)
.setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
.putAllMetadata(new HashMap<String, String>())
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateDocumentAsync(document, updateMask).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void reloadDocumentTest() throws Exception {
Document expectedResponse =
Document.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setDisplayName("displayName1714148973")
.setMimeType("mimeType-1392120434")
.addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
.setEnableAutoReload(true)
.setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
.putAllMetadata(new HashMap<String, String>())
.build();
Operation resultOperation =
Operation.newBuilder()
.setName("reloadDocumentTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockService.addResponse(resultOperation);
DocumentName name =
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
String contentUri = "contentUri264542771";
Document actualResponse = client.reloadDocumentAsync(name, contentUri).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void reloadDocumentExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
DocumentName name =
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
String contentUri = "contentUri264542771";
client.reloadDocumentAsync(name, contentUri).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void reloadDocumentTest2() throws Exception {
Document expectedResponse =
Document.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setDisplayName("displayName1714148973")
.setMimeType("mimeType-1392120434")
.addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
.setEnableAutoReload(true)
.setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
.putAllMetadata(new HashMap<String, String>())
.build();
Operation resultOperation =
Operation.newBuilder()
.setName("reloadDocumentTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockService.addResponse(resultOperation);
String name = "projects/project-5854/knowledgeBases/knowledgeBase-5854/documents/document-5854";
String contentUri = "contentUri264542771";
Document actualResponse = client.reloadDocumentAsync(name, contentUri).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void reloadDocumentExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String name =
"projects/project-5854/knowledgeBases/knowledgeBase-5854/documents/document-5854";
String contentUri = "contentUri264542771";
client.reloadDocumentAsync(name, contentUri).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void exportDocumentTest() throws Exception {
Document expectedResponse =
Document.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setDisplayName("displayName1714148973")
.setMimeType("mimeType-1392120434")
.addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
.setEnableAutoReload(true)
.setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
.putAllMetadata(new HashMap<String, String>())
.build();
Operation resultOperation =
Operation.newBuilder()
.setName("exportDocumentTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockService.addResponse(resultOperation);
ExportDocumentRequest request =
ExportDocumentRequest.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setExportFullContent(true)
.setSmartMessagingPartialUpdate(true)
.build();
Document actualResponse = client.exportDocumentAsync(request).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void exportDocumentExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
ExportDocumentRequest request =
ExportDocumentRequest.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setExportFullContent(true)
.setSmartMessagingPartialUpdate(true)
.build();
client.exportDocumentAsync(request).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
@Test
public void listLocationsTest() throws Exception {
Location responsesElement = Location.newBuilder().build();
ListLocationsResponse expectedResponse =
ListLocationsResponse.newBuilder()
.setNextPageToken("")
.addAllLocations(Arrays.asList(responsesElement))
.build();
mockService.addResponse(expectedResponse);
ListLocationsRequest request =
ListLocationsRequest.newBuilder()
.setName("projects/project-3664")
.setFilter("filter-1274492040")
.setPageSize(883849137)
.setPageToken("pageToken873572522")
.build();
ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void listLocationsExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
ListLocationsRequest request =
ListLocationsRequest.newBuilder()
.setName("projects/project-3664")
.setFilter("filter-1274492040")
.setPageSize(883849137)
.setPageToken("pageToken873572522")
.build();
client.listLocations(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getLocationTest() throws Exception {
Location expectedResponse =
Location.newBuilder()
.setName("name3373707")
.setLocationId("locationId1541836720")
.setDisplayName("displayName1714148973")
.putAllLabels(new HashMap<String, String>())
.setMetadata(Any.newBuilder().build())
.build();
mockService.addResponse(expectedResponse);
GetLocationRequest request =
GetLocationRequest.newBuilder()
.setName("projects/project-9062/locations/location-9062")
.build();
Location actualResponse = client.getLocation(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getLocationExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
GetLocationRequest request =
GetLocationRequest.newBuilder()
.setName("projects/project-9062/locations/location-9062")
.build();
client.getLocation(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
|
googleapis/google-cloud-java | 35,842 | java-resourcemanager/proto-google-cloud-resourcemanager-v3/src/main/java/com/google/cloud/resourcemanager/v3/ListFoldersRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/resourcemanager/v3/folders.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.resourcemanager.v3;
/**
*
*
* <pre>
* The ListFolders request message.
* </pre>
*
* Protobuf type {@code google.cloud.resourcemanager.v3.ListFoldersRequest}
*/
public final class ListFoldersRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.resourcemanager.v3.ListFoldersRequest)
ListFoldersRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListFoldersRequest.newBuilder() to construct.
  // Builder-based constructor invoked from Builder.buildPartial(); field values
  // are copied in by the builder (buildPartial0), not here.
  private ListFoldersRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor for the default instance: string fields start at "".
  private ListFoldersRequest() {
    parent_ = "";
    pageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Called reflectively by the protobuf runtime to allocate new instances.
    return new ListFoldersRequest();
  }
  // Message descriptor for reflection; backed by the generated FoldersProto holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.resourcemanager.v3.FoldersProto
        .internal_static_google_cloud_resourcemanager_v3_ListFoldersRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Maps descriptor fields to this class's accessors for reflective get/set.
    return com.google.cloud.resourcemanager.v3.FoldersProto
        .internal_static_google_cloud_resourcemanager_v3_ListFoldersRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.resourcemanager.v3.ListFoldersRequest.class,
            com.google.cloud.resourcemanager.v3.ListFoldersRequest.Builder.class);
  }
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the parent resource whose folders are being listed.
* Only children of this parent resource are listed; descendants are not
* listed.
*
* If the parent is a folder, use the value `folders/{folder_id}`. If the
* parent is an organization, use the value `organizations/{org_id}`.
*
* Access to this method is controlled by checking the
* `resourcemanager.folders.list` permission on the `parent`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the parent resource whose folders are being listed.
* Only children of this parent resource are listed; descendants are not
* listed.
*
* If the parent is a folder, use the value `folders/{folder_id}`. If the
* parent is an organization, use the value `organizations/{org_id}`.
*
* Access to this method is controlled by checking the
* `resourcemanager.folders.list` permission on the `parent`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  // Proto3 scalar; 0 means "unset" and lets the server pick a default.
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of folders to return in the response. The
   * server can return fewer folders than requested. If unspecified, server
   * picks an appropriate default.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to `ListFolders`
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to `ListFolders`
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int SHOW_DELETED_FIELD_NUMBER = 4;
  // Proto3 scalar; defaults to false (deleted folders excluded).
  private boolean showDeleted_ = false;
  /**
   *
   *
   * <pre>
   * Optional. Controls whether folders in the
   * [DELETE_REQUESTED][google.cloud.resourcemanager.v3.Folder.State.DELETE_REQUESTED]
   * state should be returned. Defaults to false.
   * </pre>
   *
   * <code>bool show_deleted = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The showDeleted.
   */
  @java.lang.Override
  public boolean getShowDeleted() {
    return showDeleted_;
  }
  // Memoized tri-state: -1 = not yet computed, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields exist on this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: fields at their default value ("" / 0 / false) are
    // omitted from the wire. Fields are emitted in tag order (1..4).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (showDeleted_ != false) {
      output.writeBool(4, showDeleted_);
    }
    // Round-trip any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize == -1 is the "not yet computed" sentinel.
    int size = memoizedSize;
    if (size != -1) return size;
    // Mirrors writeTo(): only non-default fields contribute to the size.
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (showDeleted_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, showDeleted_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.resourcemanager.v3.ListFoldersRequest)) {
return super.equals(obj);
}
com.google.cloud.resourcemanager.v3.ListFoldersRequest other =
(com.google.cloud.resourcemanager.v3.ListFoldersRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (getShowDeleted() != other.getShowDeleted()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode == 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Each field is mixed as (37 * h + fieldNumber) then (53 * h + valueHash),
    // keeping hashCode() consistent with equals().
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + SHOW_DELETED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getShowDeleted());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points over in-memory inputs (ByteBuffer, ByteString,
  // byte[]), each with and without an extension registry. All delegate to PARSER.
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Static parse entry points over streams. The *WithIOException helpers
  // unwrap protocol errors from the stream-based parser. parseDelimitedFrom
  // reads a varint length prefix before the message payload.
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.resourcemanager.v3.ListFoldersRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder seeded from the shared default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Fresh builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.resourcemanager.v3.ListFoldersRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge when this is the default instance (nothing to copy).
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The ListFolders request message.
* </pre>
*
* Protobuf type {@code google.cloud.resourcemanager.v3.ListFoldersRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.resourcemanager.v3.ListFoldersRequest)
com.google.cloud.resourcemanager.v3.ListFoldersRequestOrBuilder {
    // Same message descriptor as the outer type; used for builder reflection.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.resourcemanager.v3.FoldersProto
          .internal_static_google_cloud_resourcemanager_v3_ListFoldersRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.resourcemanager.v3.FoldersProto
          .internal_static_google_cloud_resourcemanager_v3_ListFoldersRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.resourcemanager.v3.ListFoldersRequest.class,
              com.google.cloud.resourcemanager.v3.ListFoldersRequest.Builder.class);
    }
    // Construct using com.google.cloud.resourcemanager.v3.ListFoldersRequest.newBuilder()
    private Builder() {}
    // Parent-aware constructor used by nested-builder plumbing.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Drop all has-bits and restore every field to its proto3 default.
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      showDeleted_ = false;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.resourcemanager.v3.FoldersProto
          .internal_static_google_cloud_resourcemanager_v3_ListFoldersRequest_descriptor;
    }
    // The shared immutable default instance of the message this builder produces.
    @java.lang.Override
    public com.google.cloud.resourcemanager.v3.ListFoldersRequest getDefaultInstanceForType() {
      return com.google.cloud.resourcemanager.v3.ListFoldersRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.resourcemanager.v3.ListFoldersRequest build() {
      com.google.cloud.resourcemanager.v3.ListFoldersRequest result = buildPartial();
      // build() differs from buildPartial() only by this initialization check.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.resourcemanager.v3.ListFoldersRequest buildPartial() {
      com.google.cloud.resourcemanager.v3.ListFoldersRequest result =
          new com.google.cloud.resourcemanager.v3.ListFoldersRequest(this);
      // Only copy fields whose has-bit is set; untouched fields keep defaults.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
// Copies each field set on this builder into the freshly constructed message.
// Bit masks map to fields in declaration order:
//   0x1 = parent, 0x2 = page_size, 0x4 = page_token, 0x8 = show_deleted.
private void buildPartial0(com.google.cloud.resourcemanager.v3.ListFoldersRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.parent_ = parent_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.pageSize_ = pageSize_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.pageToken_ = pageToken_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.showDeleted_ = showDeleted_;
  }
}
// The following overrides exist only to narrow the return type to this
// Builder; each simply delegates to the GeneratedMessageV3.Builder base.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Generic merge entry point: dispatches to the typed overload when possible,
// otherwise falls back to the reflective merge in the base class.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.resourcemanager.v3.ListFoldersRequest) {
    return mergeFrom((com.google.cloud.resourcemanager.v3.ListFoldersRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Typed merge: proto3 semantics — only fields of {@code other} that differ
// from their default value overwrite this builder's state.
public Builder mergeFrom(com.google.cloud.resourcemanager.v3.ListFoldersRequest other) {
  if (other == com.google.cloud.resourcemanager.v3.ListFoldersRequest.getDefaultInstance())
    return this;
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (other.getPageSize() != 0) {
    setPageSize(other.getPageSize());
  }
  if (!other.getPageToken().isEmpty()) {
    pageToken_ = other.pageToken_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (other.getShowDeleted() != false) {
    setShowDeleted(other.getShowDeleted());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
// Proto3 messages have no required fields, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses the wire format directly into this builder. Each case value is a
// field tag: (field_number << 3) | wire_type, so 10 = field 1 (parent,
// length-delimited), 16 = field 2 (page_size, varint), 26 = field 3
// (page_token, length-delimited), 32 = field 4 (show_deleted, varint).
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            parent_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 16:
          {
            pageSize_ = input.readInt32();
            bitField0_ |= 0x00000002;
            break;
          } // case 16
        case 26:
          {
            pageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        case 32:
          {
            showDeleted_ = input.readBool();
            bitField0_ |= 0x00000008;
            break;
          } // case 32
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on failure, since fields may have been set.
    onChanged();
  } // finally
  return this;
}
// Presence bits for the builder's four fields (see buildPartial0 for mapping).
private int bitField0_;

// Field 1. May hold either a String or a ByteString; the accessors below
// lazily convert and cache whichever form is requested.
private java.lang.Object parent_ = "";

/**
 * Required. The name of the parent resource whose folders are being listed.
 * Only direct children of this parent are listed; descendants are not.
 * Use {@code folders/{folder_id}} or {@code organizations/{org_id}}. Access
 * is controlled by the {@code resourcemanager.folders.list} permission on
 * the parent.
 *
 * @return The parent.
 */
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  parent_ = decoded; // cache the decoded form
  return decoded;
}

/**
 * UTF-8 bytes of {@code parent}; caches the encoded form.
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded; // cache the encoded form
  return encoded;
}

/**
 * Sets {@code parent}. See {@link #getParent()} for field semantics.
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 * Resets {@code parent} to its default (empty string).
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  parent_ = getDefaultInstance().getParent();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 * Sets {@code parent} from UTF-8 bytes; rejects invalid UTF-8.
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Field 2: requested page size.
private int pageSize_;

/**
 * Optional. The maximum number of folders to return in the response. The
 * server can return fewer folders than requested; if unspecified, the server
 * picks an appropriate default.
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}

/**
 * Sets {@code page_size}. See {@link #getPageSize()} for semantics.
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Resets {@code page_size} to its default (0 = server default).
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
  bitField0_ = (bitField0_ & ~0x00000002);
  pageSize_ = 0;
  onChanged();
  return this;
}
// Field 3. String/ByteString dual representation, converted lazily.
private java.lang.Object pageToken_ = "";

/**
 * Optional. A pagination token returned from a previous call to
 * {@code ListFolders} that indicates where this listing should continue from.
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  pageToken_ = decoded; // cache the decoded form
  return decoded;
}

/**
 * UTF-8 bytes of {@code page_token}; caches the encoded form.
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  pageToken_ = encoded; // cache the encoded form
  return encoded;
}

/**
 * Sets {@code page_token}. See {@link #getPageToken()} for semantics.
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

/**
 * Resets {@code page_token} to its default (empty string).
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
  pageToken_ = getDefaultInstance().getPageToken();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}

/**
 * Sets {@code page_token} from UTF-8 bytes; rejects invalid UTF-8.
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Field 4: whether folders pending deletion are included.
private boolean showDeleted_;

/**
 * Optional. Controls whether folders in the
 * [DELETE_REQUESTED][google.cloud.resourcemanager.v3.Folder.State.DELETE_REQUESTED]
 * state should be returned. Defaults to false.
 *
 * @return The showDeleted.
 */
@java.lang.Override
public boolean getShowDeleted() {
  return showDeleted_;
}

/**
 * Sets {@code show_deleted}. See {@link #getShowDeleted()} for semantics.
 *
 * @param value The showDeleted to set.
 * @return This builder for chaining.
 */
public Builder setShowDeleted(boolean value) {
  showDeleted_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}

/**
 * Resets {@code show_deleted} to its default (false).
 *
 * @return This builder for chaining.
 */
public Builder clearShowDeleted() {
  bitField0_ = (bitField0_ & ~0x00000008);
  showDeleted_ = false;
  onChanged();
  return this;
}
// Covariant-return delegations for unknown-field handling (fields read from
// the wire whose tags this generated code does not recognize).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.resourcemanager.v3.ListFoldersRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.resourcemanager.v3.ListFoldersRequest)
// Singleton default instance: the message with every field at its proto3
// default. Shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.resourcemanager.v3.ListFoldersRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.resourcemanager.v3.ListFoldersRequest();
}

public static com.google.cloud.resourcemanager.v3.ListFoldersRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stream parser: parses by delegating to Builder.mergeFrom, attaching the
// partially parsed message to any exception so callers can inspect it.
private static final com.google.protobuf.Parser<ListFoldersRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListFoldersRequest>() {
      @java.lang.Override
      public ListFoldersRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListFoldersRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListFoldersRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.resourcemanager.v3.ListFoldersRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 36,057 | substratevm/src/com.oracle.objectfile/src/com/oracle/objectfile/elf/dwarf/DwarfSectionImpl.java | /*
* Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2020, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.objectfile.elf.dwarf;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.oracle.objectfile.BasicProgbitsSectionImpl;
import com.oracle.objectfile.BuildDependency;
import com.oracle.objectfile.LayoutDecision;
import com.oracle.objectfile.LayoutDecisionMap;
import com.oracle.objectfile.ObjectFile;
import com.oracle.objectfile.debugentry.ArrayTypeEntry;
import com.oracle.objectfile.debugentry.ClassEntry;
import com.oracle.objectfile.debugentry.CompiledMethodEntry;
import com.oracle.objectfile.debugentry.FieldEntry;
import com.oracle.objectfile.debugentry.ForeignStructTypeEntry;
import com.oracle.objectfile.debugentry.HeaderTypeEntry;
import com.oracle.objectfile.debugentry.LocalEntry;
import com.oracle.objectfile.debugentry.MethodEntry;
import com.oracle.objectfile.debugentry.PointerToTypeEntry;
import com.oracle.objectfile.debugentry.PrimitiveTypeEntry;
import com.oracle.objectfile.debugentry.TypeEntry;
import com.oracle.objectfile.debugentry.range.Range;
import com.oracle.objectfile.elf.ELFMachine;
import com.oracle.objectfile.elf.ELFObjectFile;
import com.oracle.objectfile.elf.dwarf.DwarfDebugInfo.AbbrevCode;
import com.oracle.objectfile.elf.dwarf.constants.DwarfExpressionOpcode;
import com.oracle.objectfile.elf.dwarf.constants.DwarfFlag;
import com.oracle.objectfile.elf.dwarf.constants.DwarfLocationListEntry;
import com.oracle.objectfile.elf.dwarf.constants.DwarfRangeListEntry;
import com.oracle.objectfile.elf.dwarf.constants.DwarfSectionName;
import com.oracle.objectfile.elf.dwarf.constants.DwarfTag;
import com.oracle.objectfile.elf.dwarf.constants.DwarfUnitHeader;
import com.oracle.objectfile.elf.dwarf.constants.DwarfVersion;
import jdk.graal.compiler.debug.DebugContext;
/**
* A class from which all DWARF debug sections inherit providing common behaviours.
*/
public abstract class DwarfSectionImpl extends BasicProgbitsSectionImpl {
// Shared state describing all DWARF sections being generated for this image.
protected final DwarfDebugInfo dwarfSections;

// Set by enableLog() during the second (write) pass when the debug scope is
// active; gates log()/verboseLog() output.
protected boolean debug = false;

protected long debugAddress = 0;

/**
 * The name of this section.
 */
private final DwarfSectionName sectionName;

/**
 * The name of the section which needs to have been created prior to creating this section.
 */
private final DwarfSectionName targetSectionName;

/**
 * The layout properties of the target section which need to have been decided before the
 * contents of this section can be created.
 */
private final LayoutDecision.Kind[] targetSectionKinds;

/**
 * The default layout properties.
 */
private static final LayoutDecision.Kind[] defaultTargetSectionKinds = {
                LayoutDecision.Kind.CONTENT,
                LayoutDecision.Kind.SIZE
};
/**
 * Creates a section that depends on the CONTENT and SIZE layout decisions of
 * {@code targetName} (the default target kinds).
 */
public DwarfSectionImpl(DwarfDebugInfo dwarfSections, DwarfSectionName name, DwarfSectionName targetName) {
    this(dwarfSections, name, targetName, defaultTargetSectionKinds);
}

/**
 * Creates a section that depends on the given layout decisions of
 * {@code targetSectionName}.
 */
public DwarfSectionImpl(DwarfDebugInfo dwarfSections, DwarfSectionName sectionName, DwarfSectionName targetSectionName, LayoutDecision.Kind[] targetKinds) {
    this.dwarfSections = dwarfSections;
    this.sectionName = sectionName;
    this.targetSectionName = targetSectionName;
    this.targetSectionKinds = targetKinds;
}

/** True when the object file targets the AArch64 ELF machine. */
public boolean isAArch64() {
    return dwarfSections.elfMachine == ELFMachine.AArch64;
}
/**
 * Creates the target byte[] array used to define the section contents.
 *
 * The main task of this method is to precompute the size of the debug section. Given the
 * complexity of the data layouts that invariably requires performing a dummy write of the
 * contents, inserting bytes into a small, scratch buffer only when absolutely necessary.
 * Subclasses may also cache some information for use when writing the contents.
 */
public abstract void createContent();

/**
 * Populates the byte[] array used to contain the section contents.
 *
 * In most cases this task reruns the operations performed under createContent but this time
 * actually writing data to the target byte[].
 */
public abstract void writeContent(DebugContext debugContext);
/**
 * Check whether the contents byte array has been sized and created. n.b. this does not imply
 * that data has been written to the byte array.
 *
 * @return true if the contents byte array has been sized and created otherwise false.
 */
public boolean contentByteArrayCreated() {
    return getContent() != null;
}

@Override
public boolean isLoadable() {
    /*
     * Even though we're a progbits section impl we're not actually loadable.
     */
    return false;
}

/**
 * Derives the logging-context key for this section, e.g. "dwarf.debug_info".
 */
private String debugSectionLogName() {
    /*
     * Use prefix dwarf plus the section name (which already includes a dot separator) for the
     * context key. For example messages for info section will be keyed using dwarf.debug_info.
     * Other info formats use their own format-specific prefix.
     */
    assert getSectionName().startsWith(".debug");
    return "dwarf" + getSectionName();
}
/**
 * Enables logging for the write pass when the supplied scope is active.
 */
protected void enableLog(DebugContext context) {
    /*
     * Debug output is disabled during the first pass where we size the buffer. this is called
     * to enable it during the second pass where the buffer gets written, but only if the scope
     * is enabled.
     */
    assert contentByteArrayCreated();
    if (context.areScopesEnabled() && context.isLogEnabled()) {
        debug = true;
    }
}

// Logs at INFO level; no-op unless enableLog() turned logging on.
protected void log(DebugContext context, String format, Object... args) {
    if (debug) {
        context.logv(DebugContext.INFO_LEVEL, format, args);
    }
}

// Logs at VERBOSE level; no-op unless enableLog() turned logging on.
protected void verboseLog(DebugContext context, String format, Object... args) {
    if (debug) {
        context.logv(DebugContext.VERBOSE_LEVEL, format, args);
    }
}

/** True when the target object file is little-endian. */
protected boolean littleEndian() {
    return dwarfSections.getByteOrder() == ByteOrder.LITTLE_ENDIAN;
}
/*
 * Base level put methods that assume a non-null buffer.
 */

/** Stores one byte at {@code p} and returns the position after it. */
protected int putByte(byte b, byte[] buffer, int p) {
    buffer[p] = b;
    return p + 1;
}
/**
 * Stores a 16-bit value at {@code p} in the byte order of the target object
 * file and returns the position after it.
 */
protected int putShort(short s, byte[] buffer, int p) {
    final byte lo = (byte) (s & 0xff);
    final byte hi = (byte) ((s >> 8) & 0xff);
    if (littleEndian()) {
        buffer[p] = lo;
        buffer[p + 1] = hi;
    } else {
        buffer[p] = hi;
        buffer[p + 1] = lo;
    }
    return p + 2;
}
/**
 * Stores a 32-bit value at {@code p} in the byte order of the target object
 * file and returns the position after it.
 */
protected int putInt(int i, byte[] buffer, int p) {
    int pos = p;
    if (littleEndian()) {
        // least significant byte first
        for (int shift = 0; shift < 32; shift += 8) {
            buffer[pos++] = (byte) ((i >> shift) & 0xff);
        }
    } else {
        // most significant byte first
        for (int shift = 24; shift >= 0; shift -= 8) {
            buffer[pos++] = (byte) ((i >> shift) & 0xff);
        }
    }
    return pos;
}
/**
 * Stores a 64-bit value at {@code p} in the byte order of the target object
 * file and returns the position after it.
 *
 * Bug fix: the previous big-endian branch wrote the {@code (l >> 16)} byte
 * before the {@code (l >> 24)} byte, swapping bytes 4 and 5 of every
 * big-endian 64-bit write. Emitting the bytes from a single descending shift
 * loop makes the ordering correct by construction (and mirrors putInt).
 */
protected int putLong(long l, byte[] buffer, int p) {
    int pos = p;
    if (littleEndian()) {
        // least significant byte first
        for (int shift = 0; shift < 64; shift += 8) {
            buffer[pos++] = (byte) ((l >> shift) & 0xff);
        }
    } else {
        // most significant byte first
        for (int shift = 56; shift >= 0; shift -= 8) {
            buffer[pos++] = (byte) ((l >> shift) & 0xff);
        }
    }
    return pos;
}
/**
 * Writes an address that lies within the text section. For ahead-of-time
 * builds the actual value is carried by a relocation against the text
 * section, so a zero placeholder is written; for runtime compilations no
 * relocation machinery exists and the absolute value is written directly.
 *
 * Consistency fix: use {@code pos} (not the alias {@code p}) in the runtime
 * branch, matching putHeapOffset; the values are identical at that point.
 */
protected int putCodeOffset(long l, byte[] buffer, int p) {
    int pos = p;
    if (dwarfSections.isRuntimeCompilation()) {
        pos = writeLong(l, buffer, pos);
    } else {
        /*
         * Mark address so it is relocated relative to the start of the text segment.
         */
        markRelocationSite(pos, ObjectFile.RelocationKind.DIRECT_8, DwarfSectionName.TEXT_SECTION.value(), l);
        pos = writeLong(0, buffer, pos);
    }
    return pos;
}
/**
 * Writes an address that lies within the image heap. For ahead-of-time builds
 * a zero placeholder is written and the value is carried by a relocation
 * against the heap-begin symbol; for runtime compilations the absolute value
 * is written directly.
 */
protected int putHeapOffset(long l, byte[] buffer, int p) {
    int pos = p;
    if (dwarfSections.isRuntimeCompilation()) {
        pos = writeLong(l, buffer, pos);
    } else {
        /*
         * Mark address so it is relocated relative to the start of the heap.
         */
        markRelocationSite(pos, ObjectFile.RelocationKind.DIRECT_8, DwarfDebugInfo.HEAP_BEGIN_NAME, l);
        pos = writeLong(0, buffer, pos);
    }
    return pos;
}
/**
 * Writes a 4-byte offset into another DWARF section. For ahead-of-time builds
 * a zero placeholder is written and the offset is carried by a relocation
 * against {@code referencedSectionName}; for runtime compilations the offset
 * is written directly.
 */
protected int putDwarfSectionOffset(int offset, byte[] buffer, String referencedSectionName, int p) {
    int pos = p;
    if (dwarfSections.isRuntimeCompilation()) {
        pos = writeInt(offset, buffer, pos);
    } else {
        /*
         * Mark address so it is relocated relative to the start of the desired section.
         */
        markRelocationSite(pos, ObjectFile.RelocationKind.DIRECT_4, referencedSectionName, offset);
        pos = writeInt(0, buffer, pos);
    }
    return pos;
}
/**
 * Writes {@code val} as an unsigned LEB128: 7 value bits per byte, low bits
 * first, with the high bit of each byte set when more bytes follow.
 *
 * Bug fix: the loop was capped at 9 groups (63 bits), so any value with bit
 * 63 set was emitted with a dangling continuation bit and its top bit lost.
 * A full unsigned 64-bit value needs ceil(64/7) = 10 groups.
 */
protected int putULEB(long val, byte[] buffer, int p) {
    int pos = p;
    long l = val;
    for (int i = 0; i < 10; i++) {
        byte b = (byte) (l & 0x7f);
        l = l >>> 7;
        boolean done = (l == 0);
        if (!done) {
            b = (byte) (b | 0x80);
        }
        pos = writeByte(b, buffer, pos);
        if (done) {
            break;
        }
    }
    return pos;
}
/**
 * Writes {@code val} as a signed LEB128: 7 value bits per byte, low bits
 * first, terminating once the remaining (sign-extended) bits are redundant
 * with the sign bit of the last emitted group.
 *
 * Bug fix: the loop was capped at 9 groups (63 bits), so extreme values such
 * as {@code Long.MIN_VALUE} were emitted with a dangling continuation bit.
 * A signed 64-bit value can need up to 10 groups.
 */
protected int putSLEB(long val, byte[] buffer, int p) {
    int pos = p;
    long l = val;
    for (int i = 0; i < 10; i++) {
        byte b = (byte) (l & 0x7f);
        l = l >> 7;
        boolean bIsSigned = (b & 0x40) != 0;
        boolean done = ((bIsSigned && l == -1) || (!bIsSigned && l == 0));
        if (!done) {
            b = (byte) (b | 0x80);
        }
        pos = writeByte(b, buffer, pos);
        if (done) {
            break;
        }
    }
    return pos;
}
/**
 * Counts the bytes needed to encode {@code s} as UTF-8, excluding any
 * terminating null byte.
 */
protected static int countUTF8Bytes(String s) {
    return countUTF8Bytes(s, 0);
}

/**
 * Counts the UTF-8 bytes for the suffix of {@code s} that starts at char
 * index {@code startChar} (a char index, not a byte index).
 */
protected static int countUTF8Bytes(String s, int startChar) {
    return s.substring(startChar).getBytes(StandardCharsets.UTF_8).length;
}
/**
 * Stores the UTF-8 encoding of {@code s} (from char index {@code startChar})
 * at {@code p}, followed by a terminating null byte, and returns the position
 * after the terminator.
 */
protected int putUTF8StringBytes(String s, int startChar, byte[] buffer, int p) {
    byte[] encoded = s.substring(startChar).getBytes(StandardCharsets.UTF_8);
    System.arraycopy(encoded, 0, buffer, p, encoded.length);
    int pos = p + encoded.length;
    buffer[pos++] = '\0';
    return pos;
}
/*
 * Common write methods that check for a null buffer.
 *
 * When buffer is null these methods write nothing and only advance the
 * position; that implements the sizing pass (see createContent), which reuses
 * the same code paths as the write pass (see writeContent).
 */
protected int writeByte(byte b, byte[] buffer, int p) {
    if (buffer != null) {
        return putByte(b, buffer, p);
    } else {
        return p + 1;
    }
}

protected int writeShort(short s, byte[] buffer, int p) {
    if (buffer != null) {
        return putShort(s, buffer, p);
    } else {
        return p + 2;
    }
}

protected int writeInt(int i, byte[] buffer, int p) {
    if (buffer != null) {
        return putInt(i, buffer, p);
    } else {
        return p + 4;
    }
}

protected int writeLong(long l, byte[] buffer, int p) {
    if (buffer != null) {
        return putLong(l, buffer, p);
    } else {
        return p + 8;
    }
}

// Text-section address: 8 bytes (value or relocation placeholder).
protected int writeCodeOffset(long l, byte[] buffer, int p) {
    if (buffer != null) {
        return putCodeOffset(l, buffer, p);
    } else {
        return p + 8;
    }
}

// Image-heap address: 8 bytes (value or relocation placeholder).
protected int writeHeapOffset(long l, byte[] buffer, int p) {
    if (buffer != null) {
        return putHeapOffset(l, buffer, p);
    } else {
        return p + 8;
    }
}
// LEB128 encodings are variable-length, so the sizing pass cannot advance by
// a constant: a dummy encode into the scratch buffer measures the length.
protected int writeULEB(long val, byte[] buffer, int p) {
    if (buffer != null) {
        // write to the buffer at the supplied position
        return putULEB(val, buffer, p);
    } else {
        // write to a scratch buffer at position 0 then offset from initial pos
        return p + putULEB(val, scratch, 0);
    }
}

protected int writeSLEB(long val, byte[] buffer, int p) {
    if (buffer != null) {
        // write to the buffer at the supplied position
        return putSLEB(val, buffer, p);
    } else {
        // write to a scratch buffer at position 0 then offset from initial pos
        return p + putSLEB(val, scratch, 0);
    }
}

// Null-terminated UTF-8 string, starting from char index 0.
protected int writeUTF8StringBytes(String s, byte[] buffer, int pos) {
    return writeUTF8StringBytes(s, 0, buffer, pos);
}

protected int writeUTF8StringBytes(String s, int startChar, byte[] buffer, int p) {
    if (buffer != null) {
        return putUTF8StringBytes(s, startChar, buffer, p);
    } else {
        // +1 for null termination
        return p + s.substring(startChar).getBytes(StandardCharsets.UTF_8).length + 1;
    }
}
// Writes a DWARF expression opcode byte.
protected int writeExprOpcode(DwarfExpressionOpcode opcode, byte[] buffer, int p) {
    return writeByte(opcode.value(), buffer, p);
}

// DW_OP_lit0..DW_OP_lit31 are consecutive opcodes: the literal value is
// encoded by adding it to the DW_OP_lit0 base (hence the < 0x20 bound).
protected int writeExprOpcodeLiteral(int offset, byte[] buffer, int p) {
    byte value = DwarfExpressionOpcode.DW_OP_lit0.value();
    assert offset >= 0 && offset < 0x20;
    value = (byte) (value + offset);
    return writeByte(value, buffer, p);
}

// DW_OP_reg0..DW_OP_reg31: register number added to the DW_OP_reg0 base.
protected int writeExprOpcodeReg(byte reg, byte[] buffer, int p) {
    byte value = DwarfExpressionOpcode.DW_OP_reg0.value();
    assert reg >= 0 && reg < 0x20;
    value += reg;
    return writeByte(value, buffer, p);
}

// DW_OP_breg0..DW_OP_breg31: register number added to the DW_OP_breg0 base.
protected int writeExprOpcodeBReg(byte reg, byte[] buffer, int p) {
    byte value = DwarfExpressionOpcode.DW_OP_breg0.value();
    assert reg >= 0 && reg < 0x20;
    value += reg;
    return writeByte(value, buffer, p);
}
/*
 * Common write methods that rely on called methods to handle a null buffer
 */

// Back-patches a 4-byte unit-length field at lengthPos once the end of the
// unit (pos) is known; the length excludes the length word itself.
protected void patchLength(int lengthPos, byte[] buffer, int pos) {
    int length = pos - (lengthPos + 4);
    writeInt(length, buffer, lengthPos);
}

// Abbreviation codes are identified by their enum ordinal, written as SLEB.
protected int writeAbbrevCode(AbbrevCode code, byte[] buffer, int pos) {
    return writeSLEB(code.ordinal(), buffer, pos);
}
// Single-byte entry kind for the .debug_rnglists section.
protected int writeRangeListEntry(DwarfRangeListEntry rangeListEntry, byte[] buffer, int pos) {
    return writeByte(rangeListEntry.value(), buffer, pos);
}

// Single-byte entry kind for the .debug_loclists section.
protected int writeLocationListEntry(DwarfLocationListEntry locationListEntry, byte[] buffer, int pos) {
    return writeByte(locationListEntry.value(), buffer, pos);
}

// Writes a DIE tag as SLEB; tag 0 (the null DIE terminator) is written as a
// plain zero byte.
protected int writeTag(DwarfTag dwarfTag, byte[] buffer, int pos) {
    int code = dwarfTag.value();
    if (code == 0) {
        return writeByte((byte) 0, buffer, pos);
    } else {
        return writeSLEB(code, buffer, pos);
    }
}
// 2-byte DWARF version number in a unit header.
protected int writeDwarfVersion(DwarfVersion dwarfVersion, byte[] buffer, int pos) {
    return writeShort(dwarfVersion.value(), buffer, pos);
}

// 1-byte unit-header kind byte.
protected int writeDwarfUnitHeader(DwarfUnitHeader dwarfUnitHeader, byte[] buffer, int pos) {
    return writeByte(dwarfUnitHeader.value(), buffer, pos);
}

// 8-byte type signature used to link type references to type units.
protected int writeTypeSignature(long typeSignature, byte[] buffer, int pos) {
    return writeLong(typeSignature, buffer, pos);
}

// 1-byte flag attribute value.
protected int writeFlag(DwarfFlag flag, byte[] buffer, int pos) {
    return writeByte(flag.value(), buffer, pos);
}

// Address attribute: written as a (relocatable) code offset.
protected int writeAttrAddress(long address, byte[] buffer, int pos) {
    return writeCodeOffset(address, buffer, pos);
}

// Fixed-width data attributes (DW_FORM_data8/4/2/1).
@SuppressWarnings("unused")
protected int writeAttrData8(long value, byte[] buffer, int pos) {
    return writeLong(value, buffer, pos);
}

protected int writeAttrData4(int value, byte[] buffer, int pos) {
    return writeInt(value, buffer, pos);
}

protected int writeAttrData2(short value, byte[] buffer, int pos) {
    return writeShort(value, buffer, pos);
}

protected int writeAttrData1(byte value, byte[] buffer, int pos) {
    return writeByte(value, buffer, pos);
}
/** Writes a 4-byte offset referring into the info section. */
protected int writeInfoSectionOffset(int offset, byte[] buf, int p) {
    return writeDwarfSectionOffset(offset, buf, DwarfSectionName.DW_INFO_SECTION, p);
}

/** Writes a 4-byte offset referring into the line section. */
protected int writeLineSectionOffset(int offset, byte[] buf, int p) {
    return writeDwarfSectionOffset(offset, buf, DwarfSectionName.DW_LINE_SECTION, p);
}

/** Writes a 4-byte offset referring into the range lists section. */
protected int writeRangeListsSectionOffset(int offset, byte[] buf, int p) {
    return writeDwarfSectionOffset(offset, buf, DwarfSectionName.DW_RNGLISTS_SECTION, p);
}

/** Writes a 4-byte offset referring into the abbrev section. */
protected int writeAbbrevSectionOffset(int offset, byte[] buf, int p) {
    return writeDwarfSectionOffset(offset, buf, DwarfSectionName.DW_ABBREV_SECTION, p);
}

/** Resolves {@code value} to its debug string index and writes that as a str section offset. */
protected int writeStrSectionOffset(String value, byte[] buf, int p) {
    return writeStrSectionOffset(debugStringIndex(value), buf, p);
}

private int writeStrSectionOffset(int offset, byte[] buf, int p) {
    return writeDwarfSectionOffset(offset, buf, DwarfSectionName.DW_STR_SECTION, p);
}

/** Resolves {@code value} to its debug line string index and writes that as a line_str section offset. */
protected int writeLineStrSectionOffset(String value, byte[] buf, int p) {
    return writeLineStrSectionOffset(debugLineStringIndex(value), buf, p);
}

private int writeLineStrSectionOffset(int offset, byte[] buf, int p) {
    return writeDwarfSectionOffset(offset, buf, DwarfSectionName.DW_LINE_STR_SECTION, p);
}

/** Writes a 4-byte offset referring into the location lists section. */
protected int writeLocSectionOffset(int offset, byte[] buf, int p) {
    return writeDwarfSectionOffset(offset, buf, DwarfSectionName.DW_LOCLISTS_SECTION, p);
}
/**
 * Writes a 4-byte offset that refers into another DWARF section. When a buffer is
 * supplied the value is emitted via a relocation record so the linker can adjust it
 * when section content is merged or moved; during the sizing pass (null buffer) only
 * the position is advanced.
 */
protected int writeDwarfSectionOffset(int offset, byte[] buffer, DwarfSectionName referencedSectionName, int pos) {
    if (buffer == null) {
        /* Sizing pass: a section offset always occupies 4 bytes. */
        return pos + 4;
    }
    return putDwarfSectionOffset(offset, buffer, referencedSectionName.value(), pos);
}

/** Terminates a list of child DIEs by writing the null tag. */
protected int writeAttrNull(byte[] buffer, int pos) {
    return writeTag(DwarfTag.DW_TAG_null, buffer, pos);
}
/*
 * Write a heap location expression preceded by a ULEB block size count as appropriate for an
 * attribute with FORM exprloc. If a heapbase register is in use the generated expression
 * computes the location as a constant offset from the runtime heap base register. If a heapbase
 * register is not in use it computes the location as a fixed, relocatable offset from the
 * link-time heap base address.
 */
protected int writeHeapLocationExprLoc(long offset, byte[] buffer, int p) {
    int pos = p;
    /*
     * Size the DWARF location expression first with a dry-run pass (null buffer) so its
     * byte count can be written as a ULEB before the expression itself.
     */
    int size = writeHeapLocation(offset, null, 0);
    /* Write the size and then the expression into the output buffer. */
    pos = writeULEB(size, buffer, pos);
    return writeHeapLocation(offset, buffer, pos);
}
/*
 * Write a heap location expression preceded by a ULEB block size count as appropriate for
 * location list in the debug_loc section. If a heapbase register is in use the generated
 * expression computes the location as a constant offset from the runtime heap base register. If
 * a heapbase register is not in use it computes the location as a fixed, relocatable offset
 * from the link-time heap base address.
 */
protected int writeHeapLocationLocList(long offset, byte[] buffer, int p) {
    int pos = p;
    int len = 0;
    // Remember where the length byte goes so it can be backpatched below.
    int lenPos = pos;
    // write dummy length
    pos = writeULEB(len, buffer, pos);
    int zeroPos = pos;
    pos = writeHeapLocation(offset, buffer, pos);
    pos = writeExprOpcode(DwarfExpressionOpcode.DW_OP_stack_value, buffer, pos);
    // backpatch length; assumes the expression length fits a single ULEB byte (< 128),
    // otherwise the patch would overwrite expression bytes -- TODO confirm
    len = pos - zeroPos;
    writeULEB(len, buffer, lenPos);
    return pos;
}
/*
 * Emit a bare heap location expression for a single location. With a heap base register in
 * use the location is expressed as an SLEB offset from that register; otherwise it is a
 * relocatable address relative to the link-time heap base.
 */
protected int writeHeapLocation(long offset, byte[] buffer, int p) {
    return dwarfSections.useHeapBase()
                    ? writeHeapLocationBaseRelative(offset, buffer, p)
                    : writeHeapLocationOffset(offset, buffer, p);
}

private int writeHeapLocationBaseRelative(long offset, byte[] buffer, int p) {
    /* Breg opcode + SLEB offset: rebase relative to the heapbase register. */
    int pos = writeExprOpcodeBReg(dwarfSections.getHeapbaseRegister(), buffer, p);
    return writeSLEB(offset, buffer, pos);
}

private int writeHeapLocationOffset(long offset, byte[] buffer, int p) {
    /* DW_OP_addr + relocatable address relative to the heap section start. */
    int pos = writeExprOpcode(DwarfExpressionOpcode.DW_OP_addr, buffer, p);
    return writeHeapOffset(offset, buffer, pos);
}
/**
 * Names the section that must be sized and written before this one.
 *
 * @return the name of the section this debug section is ordered after.
 */
public final String targetName() {
    return targetSectionName.value();
}

/**
 * Names this debug section.
 *
 * @return this debug section's name.
 */
public final String getSectionName() {
    return sectionName.value();
}
@Override
public int getOrDecideSize(Map<ObjectFile.Element, LayoutDecisionMap> alreadyDecided, int sizeHint) {
    // When ordered after another debug section, sanity-check (under -ea only) that the
    // predecessor's content has already been created; sizes are computed in dependency order.
    if (targetName().startsWith(".debug")) {
        ObjectFile.Element previousElement = this.getElement().getOwner().elementForName(targetName());
        DwarfSectionImpl previousSection = (DwarfSectionImpl) previousElement.getImpl();
        assert previousSection.contentByteArrayCreated();
    }
    // Size is simply the length of the created content byte array.
    createContent();
    return getContent().length;
}

@Override
public byte[] getOrDecideContent(Map<ObjectFile.Element, LayoutDecisionMap> alreadyDecided, byte[] contentHint) {
    // The content byte array must exist (created during sizing) before it can be written.
    assert contentByteArrayCreated();
    /*
     * Ensure content byte[] has been written before calling super method.
     *
     * we do this in a nested debug scope derived from the one set up under the object file
     * write
     */
    getOwner().debugContext(debugSectionLogName(), this::writeContent);
    return super.getOrDecideContent(alreadyDecided, contentHint);
}
/**
 * Registers layout dependencies that force the target section's decisions to be taken
 * before this section's: our content depends on the target's content, and our size
 * depends on every other tracked target decision kind.
 */
@Override
public Set<BuildDependency> getDependencies(Map<ObjectFile.Element, LayoutDecisionMap> decisions) {
    Set<BuildDependency> deps = super.getDependencies(decisions);
    ELFObjectFile.ELFSection targetSection = (ELFObjectFile.ELFSection) getElement().getOwner().elementForName(targetName());
    LayoutDecisionMap ourDecisions = decisions.get(getElement());
    LayoutDecision ourContent = ourDecisions.getDecision(LayoutDecision.Kind.CONTENT);
    LayoutDecision ourSize = ourDecisions.getDecision(LayoutDecision.Kind.SIZE);
    LayoutDecisionMap targetDecisions = decisions.get(targetSection);
    for (LayoutDecision.Kind targetKind : targetSectionKinds) {
        LayoutDecision targetDecision = targetDecisions.getDecision(targetKind);
        /* CONTENT orders our content after the target's; any other kind orders our size. */
        LayoutDecision ours = (targetKind == LayoutDecision.Kind.CONTENT) ? ourContent : ourSize;
        deps.add(BuildDependency.createOrGet(ours, targetDecision));
    }
    return deps;
}
/**
 * A scratch buffer used during computation of a section's size.
 * Presumably sized to hold the largest LEB128 encoding of a 64-bit value (10 bytes) -- confirm.
 * NOTE(review): shared static mutable state; assumes section sizing is single-threaded -- confirm.
 */
protected static final byte[] scratch = new byte[10];
/**
 * Retrieve a list of all types notified via the DebugTypeInfo API.
 *
 * @return a list of all types notified via the DebugTypeInfo API.
 */
protected List<TypeEntry> getTypes() {
    return dwarfSections.getTypes();
}

/**
 * Retrieve a list of all primitive types notified via the DebugTypeInfo API.
 *
 * @return a list of all primitive types notified via the DebugTypeInfo API.
 */
protected List<PrimitiveTypeEntry> getPrimitiveTypes() {
    return dwarfSections.getPrimitiveTypes();
}

/**
 * Retrieve a list of all pointer types notified via the DebugTypeInfo API.
 *
 * @return a list of all pointer types notified via the DebugTypeInfo API.
 */
protected List<PointerToTypeEntry> getPointerTypes() {
    return dwarfSections.getPointerTypes();
}

/**
 * Retrieve a list of all foreign struct types notified via the DebugTypeInfo API.
 *
 * @return a list of all foreign struct types notified via the DebugTypeInfo API.
 */
protected List<ForeignStructTypeEntry> getForeignStructTypes() {
    return dwarfSections.getForeignStructTypes();
}

/**
 * Retrieve a list of all array types notified via the DebugTypeInfo API.
 *
 * @return a list of all array types notified via the DebugTypeInfo API.
 */
protected List<ArrayTypeEntry> getArrayTypes() {
    return dwarfSections.getArrayTypes();
}

/**
 * Retrieve the unique object header type notified via the DebugTypeInfo API.
 *
 * @return the unique object header type notified via the DebugTypeInfo API.
 */
protected HeaderTypeEntry headerType() {
    return dwarfSections.lookupHeaderType();
}

/**
 * Retrieve a list of all instance classes, including interfaces and enums, notified via the
 * DebugTypeInfo API.
 *
 * @return a list of all instance classes notified via the DebugTypeInfo API.
 */
protected List<ClassEntry> getInstanceClasses() {
    return dwarfSections.getInstanceClasses();
}

/**
 * Retrieve the subset of instance classes that have an associated compilation.
 *
 * @return a list of instance classes with a compilation.
 */
protected List<ClassEntry> getInstanceClassesWithCompilation() {
    return dwarfSections.getInstanceClassesWithCompilation();
}

/**
 * Retrieve a list of all compiled methods notified via the DebugTypeInfo API.
 *
 * @return a list of all compiled methods notified via the DebugTypeInfo API.
 */
protected List<CompiledMethodEntry> getCompiledMethods() {
    return dwarfSections.getCompiledMethods();
}
/**
 * Looks up the offset assigned to {@code s} in the debug string table. Returns the
 * placeholder 0 until this section's content byte array has been created.
 */
protected int debugStringIndex(String s) {
    return contentByteArrayCreated() ? dwarfSections.debugStringIndex(s) : 0;
}

/**
 * Looks up the offset assigned to {@code s} in the debug line string table. Returns the
 * placeholder 0 until this section's content byte array has been created.
 */
protected int debugLineStringIndex(String s) {
    return contentByteArrayCreated() ? dwarfSections.debugLineStringIndex(s) : 0;
}

/** Forwards to the shared state to obtain the uniqued debug-string copy of {@code s}. */
protected String uniqueDebugString(String s) {
    return dwarfSections.uniqueDebugString(s);
}

/** Forwards to the shared state to obtain the uniqued debug-line-string copy of {@code s}. */
protected String uniqueDebugLineString(String s) {
    return dwarfSections.uniqueDebugLineString(s);
}

/** Retrieves the canonical object class entry (presumably java.lang.Object -- confirm). */
protected ClassEntry lookupObjectClass() {
    return dwarfSections.lookupObjectClass();
}
/*
 * Offset bookkeeping for DIEs recorded in the shared dwarfSections state. Getters return a
 * placeholder 0 until the content byte array has been created, after which recorded offsets
 * are final.
 */
protected int getCUIndex(ClassEntry classEntry) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getCUIndex(classEntry);
}

/** Records the compile unit (CU) index for {@code classEntry}. */
protected void setCUIndex(ClassEntry classEntry, int idx) {
    dwarfSections.setCUIndex(classEntry, idx);
}

/** Records the code ranges index for {@code classEntry}. */
protected void setCodeRangesIndex(ClassEntry classEntry, int pos) {
    dwarfSections.setCodeRangesIndex(classEntry, pos);
}

protected int getCodeRangesIndex(ClassEntry classEntry) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getCodeRangesIndex(classEntry);
}

/** Records the location list index for {@code classEntry}. */
protected void setLocationListIndex(ClassEntry classEntry, int pos) {
    dwarfSections.setLocationListIndex(classEntry, pos);
}

// NOTE(review): unlike the other getters here, this one does not return 0 before content
// creation -- confirm whether the guard is intentionally omitted.
protected int getLocationListIndex(ClassEntry classEntry) {
    return dwarfSections.getLocationListIndex(classEntry);
}

/** Records the line index for {@code classEntry}. */
protected void setLineIndex(ClassEntry classEntry, int pos) {
    dwarfSections.setLineIndex(classEntry, pos);
}

protected int getLineIndex(ClassEntry classEntry) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getLineIndex(classEntry);
}

/** Records the line prologue size for {@code classEntry}. */
protected void setLinePrologueSize(ClassEntry classEntry, int pos) {
    dwarfSections.setLinePrologueSize(classEntry, pos);
}

protected int getLinePrologueSize(ClassEntry classEntry) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getLinePrologueSize(classEntry);
}

/** Records the declaration index for {@code fieldEntry}. */
protected void setFieldDeclarationIndex(FieldEntry fieldEntry, int pos) {
    dwarfSections.setFieldDeclarationIndex(fieldEntry, pos);
}

protected int getFieldDeclarationIndex(FieldEntry fieldEntry) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getFieldDeclarationIndex(fieldEntry);
}

/** Records the declaration index for {@code methodEntry}. */
protected void setMethodDeclarationIndex(MethodEntry methodEntry, int pos) {
    dwarfSections.setMethodDeclarationIndex(methodEntry, pos);
}

protected int getMethodDeclarationIndex(MethodEntry methodEntry) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getMethodDeclarationIndex(methodEntry);
}

/** Records the abstract inline method index for the (class, method) pair. */
protected void setAbstractInlineMethodIndex(ClassEntry classEntry, MethodEntry methodEntry, int pos) {
    dwarfSections.setAbstractInlineMethodIndex(classEntry, methodEntry, pos);
}

protected int getAbstractInlineMethodIndex(ClassEntry classEntry, MethodEntry methodEntry) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getAbstractInlineMethodIndex(classEntry, methodEntry);
}
/**
 * Record the info section offset of a local (or parameter) declaration DIE appearing as a child
 * of a standard method declaration or an abstract inline method declaration.
 *
 * @param classEntry the class of the top level method being declared or inlined into
 * @param methodEntry the method being declared or inlined.
 * @param localInfo the local or param whose index is to be recorded.
 * @param index the info section offset to be recorded.
 */
protected void setMethodLocalIndex(ClassEntry classEntry, MethodEntry methodEntry, LocalEntry localInfo, int index) {
    dwarfSections.setMethodLocalIndex(classEntry, methodEntry, localInfo, index);
}

/**
 * Retrieve the info section offset of a local (or parameter) declaration DIE appearing as a
 * child of a standard method declaration or an abstract inline method declaration. Returns a
 * placeholder 0 until the content byte array has been created.
 *
 * @param classEntry the class of the top level method being declared or inlined into
 * @param methodEntry the method being declared or inlined
 * @param localInfo the local or param whose index is to be retrieved.
 * @return the associated info section offset.
 */
protected int getMethodLocalIndex(ClassEntry classEntry, MethodEntry methodEntry, LocalEntry localInfo) {
    if (!contentByteArrayCreated()) {
        return 0;
    }
    return dwarfSections.getMethodLocalIndex(classEntry, methodEntry, localInfo);
}

/**
 * Record the info section offset of a local (or parameter) location DIE associated with a top
 * level (primary) or inline method range.
 *
 * @param range the top level (primary) or inline range to which the local (or parameter)
 *            belongs.
 * @param localInfo the local or param whose index is to be recorded.
 * @param index the info section offset index to be recorded.
 */
protected void setRangeLocalIndex(Range range, LocalEntry localInfo, int index) {
    dwarfSections.setRangeLocalIndex(range, localInfo, index);
}

/**
 * Retrieve the info section offset of a local (or parameter) location DIE associated with a top
 * level (primary) or inline method range.
 *
 * NOTE(review): no contentByteArrayCreated() guard here, unlike getMethodLocalIndex above --
 * confirm whether that is intentional.
 *
 * @param range the top level (primary) or inline range to which the local (or parameter)
 *            belongs.
 * @param localInfo the local or param whose index is to be retrieved.
 * @return the associated info section offset.
 */
protected int getRangeLocalIndex(Range range, LocalEntry localInfo) {
    return dwarfSections.getRangeLocalIndex(range, localInfo);
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securesourcemanager/v1/secure_source_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securesourcemanager.v1;
/**
*
*
* <pre>
* The response to batch create pull request comments.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse}
*/
public final class BatchCreatePullRequestCommentsResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse)
BatchCreatePullRequestCommentsResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BatchCreatePullRequestCommentsResponse.newBuilder() to construct.
  private BatchCreatePullRequestCommentsResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: the repeated field starts out as an empty list.
  private BatchCreatePullRequestCommentsResponse() {
    pullRequestComments_ = java.util.Collections.emptyList();
  }

  // Invoked reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchCreatePullRequestCommentsResponse();
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
        .internal_static_google_cloud_securesourcemanager_v1_BatchCreatePullRequestCommentsResponse_descriptor;
  }

  // Maps descriptor fields to the generated accessors for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
        .internal_static_google_cloud_securesourcemanager_v1_BatchCreatePullRequestCommentsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse.class,
            com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse.Builder
                .class);
  }
  public static final int PULL_REQUEST_COMMENTS_FIELD_NUMBER = 1;

  // Backing storage for the repeated pull_request_comments field (field number 1).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment>
      pullRequestComments_;

  /**
   *
   *
   * <pre>
   * The list of pull request comments created.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment>
      getPullRequestCommentsList() {
    return pullRequestComments_;
  }

  /**
   *
   *
   * <pre>
   * The list of pull request comments created.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder>
      getPullRequestCommentsOrBuilderList() {
    return pullRequestComments_;
  }

  /**
   *
   *
   * <pre>
   * The list of pull request comments created.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
   * </code>
   */
  @java.lang.Override
  public int getPullRequestCommentsCount() {
    return pullRequestComments_.size();
  }

  /**
   *
   *
   * <pre>
   * The list of pull request comments created.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.PullRequestComment getPullRequestComments(
      int index) {
    return pullRequestComments_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The list of pull request comments created.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder
      getPullRequestCommentsOrBuilder(int index) {
    return pullRequestComments_.get(index);
  }
  // Memoized initialization check: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes the repeated field followed by any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < pullRequestComments_.size(); i++) {
      output.writeMessage(1, pullRequestComments_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size of this message.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < pullRequestComments_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, pullRequestComments_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over the repeated field and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse other =
        (com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse) obj;

    if (!getPullRequestCommentsList().equals(other.getPullRequestCommentsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals; constants are the protobuf generator's standard mix.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getPullRequestCommentsCount() > 0) {
      hash = (37 * hash) + PULL_REQUEST_COMMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getPullRequestCommentsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; each overload delegates to PARSER or the
  // GeneratedMessageV3 IO helpers.
  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods generated for every protobuf message.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The response to batch create pull request comments.
* </pre>
*
* Protobuf type {@code
* google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse)
com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponseOrBuilder {
    /** Returns the protobuf descriptor for the enclosing message type. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_BatchCreatePullRequestCommentsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_BatchCreatePullRequestCommentsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse.class,
              com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets the repeated field (or its nested builder) and the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (pullRequestCommentsBuilder_ == null) {
        pullRequestComments_ = java.util.Collections.emptyList();
      } else {
        pullRequestComments_ = null;
        pullRequestCommentsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_BatchCreatePullRequestCommentsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
        getDefaultInstanceForType() {
      return com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
          .getDefaultInstance();
    }

    // build() enforces initialization; buildPartial() does not.
    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse build() {
      com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
        buildPartial() {
      com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse result =
          new com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes the repeated field into the result as an unmodifiable list (or builds it).
    private void buildPartialRepeatedFields(
        com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse result) {
      if (pullRequestCommentsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          pullRequestComments_ = java.util.Collections.unmodifiableList(pullRequestComments_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.pullRequestComments_ = pullRequestComments_;
      } else {
        result.pullRequestComments_ = pullRequestCommentsBuilder_.build();
      }
    }

    // Generated no-op for this message (no singular fields); the local is a generator artifact.
    private void buildPartial0(
        com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse result) {
      int from_bitField0_ = bitField0_;
    }
    // Delegating overrides required by the GeneratedMessageV3.Builder contract.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dynamic merge: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof
          com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse) {
        return mergeFrom(
            (com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: appends the other message's comments, adopting its list directly
    // when ours is still empty to avoid a copy.
    public Builder mergeFrom(
        com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse other) {
      if (other
          == com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
              .getDefaultInstance()) return this;
      if (pullRequestCommentsBuilder_ == null) {
        if (!other.pullRequestComments_.isEmpty()) {
          if (pullRequestComments_.isEmpty()) {
            pullRequestComments_ = other.pullRequestComments_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensurePullRequestCommentsIsMutable();
            pullRequestComments_.addAll(other.pullRequestComments_);
          }
          onChanged();
        }
      } else {
        if (!other.pullRequestComments_.isEmpty()) {
          if (pullRequestCommentsBuilder_.isEmpty()) {
            pullRequestCommentsBuilder_.dispose();
            pullRequestCommentsBuilder_ = null;
            pullRequestComments_ = other.pullRequestComments_;
            bitField0_ = (bitField0_ & ~0x00000001);
            pullRequestCommentsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getPullRequestCommentsFieldBuilder()
                    : null;
          } else {
            pullRequestCommentsBuilder_.addAllMessages(other.pullRequestComments_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 carries pull_request_comments entries; anything
    // else is preserved as an unknown field (or ends the loop on an end-group tag).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.securesourcemanager.v1.PullRequestComment m =
                    input.readMessage(
                        com.google.cloud.securesourcemanager.v1.PullRequestComment.parser(),
                        extensionRegistry);
                if (pullRequestCommentsBuilder_ == null) {
                  ensurePullRequestCommentsIsMutable();
                  pullRequestComments_.add(m);
                } else {
                  pullRequestCommentsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    // Builder-side storage for the repeated field; bit 0x1 tracks whether the list
    // is a private mutable copy.
    private java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment>
        pullRequestComments_ = java.util.Collections.emptyList();

    // Copy-on-write: replace the (possibly shared/immutable) list with a mutable copy.
    private void ensurePullRequestCommentsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        pullRequestComments_ =
            new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.PullRequestComment>(
                pullRequestComments_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested builder support for the repeated field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.securesourcemanager.v1.PullRequestComment,
            com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder,
            com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder>
        pullRequestCommentsBuilder_;
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment>
getPullRequestCommentsList() {
if (pullRequestCommentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(pullRequestComments_);
} else {
return pullRequestCommentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public int getPullRequestCommentsCount() {
if (pullRequestCommentsBuilder_ == null) {
return pullRequestComments_.size();
} else {
return pullRequestCommentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public com.google.cloud.securesourcemanager.v1.PullRequestComment getPullRequestComments(
int index) {
if (pullRequestCommentsBuilder_ == null) {
return pullRequestComments_.get(index);
} else {
return pullRequestCommentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder setPullRequestComments(
int index, com.google.cloud.securesourcemanager.v1.PullRequestComment value) {
if (pullRequestCommentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePullRequestCommentsIsMutable();
pullRequestComments_.set(index, value);
onChanged();
} else {
pullRequestCommentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder setPullRequestComments(
int index,
com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder builderForValue) {
if (pullRequestCommentsBuilder_ == null) {
ensurePullRequestCommentsIsMutable();
pullRequestComments_.set(index, builderForValue.build());
onChanged();
} else {
pullRequestCommentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder addPullRequestComments(
com.google.cloud.securesourcemanager.v1.PullRequestComment value) {
if (pullRequestCommentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePullRequestCommentsIsMutable();
pullRequestComments_.add(value);
onChanged();
} else {
pullRequestCommentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder addPullRequestComments(
int index, com.google.cloud.securesourcemanager.v1.PullRequestComment value) {
if (pullRequestCommentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePullRequestCommentsIsMutable();
pullRequestComments_.add(index, value);
onChanged();
} else {
pullRequestCommentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder addPullRequestComments(
com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder builderForValue) {
if (pullRequestCommentsBuilder_ == null) {
ensurePullRequestCommentsIsMutable();
pullRequestComments_.add(builderForValue.build());
onChanged();
} else {
pullRequestCommentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder addPullRequestComments(
int index,
com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder builderForValue) {
if (pullRequestCommentsBuilder_ == null) {
ensurePullRequestCommentsIsMutable();
pullRequestComments_.add(index, builderForValue.build());
onChanged();
} else {
pullRequestCommentsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder addAllPullRequestComments(
java.lang.Iterable<? extends com.google.cloud.securesourcemanager.v1.PullRequestComment>
values) {
if (pullRequestCommentsBuilder_ == null) {
ensurePullRequestCommentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, pullRequestComments_);
onChanged();
} else {
pullRequestCommentsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder clearPullRequestComments() {
if (pullRequestCommentsBuilder_ == null) {
pullRequestComments_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
pullRequestCommentsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public Builder removePullRequestComments(int index) {
if (pullRequestCommentsBuilder_ == null) {
ensurePullRequestCommentsIsMutable();
pullRequestComments_.remove(index);
onChanged();
} else {
pullRequestCommentsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder
getPullRequestCommentsBuilder(int index) {
return getPullRequestCommentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder
getPullRequestCommentsOrBuilder(int index) {
if (pullRequestCommentsBuilder_ == null) {
return pullRequestComments_.get(index);
} else {
return pullRequestCommentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public java.util.List<
? extends com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder>
getPullRequestCommentsOrBuilderList() {
if (pullRequestCommentsBuilder_ != null) {
return pullRequestCommentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(pullRequestComments_);
}
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder
addPullRequestCommentsBuilder() {
return getPullRequestCommentsFieldBuilder()
.addBuilder(
com.google.cloud.securesourcemanager.v1.PullRequestComment.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder
addPullRequestCommentsBuilder(int index) {
return getPullRequestCommentsFieldBuilder()
.addBuilder(
index,
com.google.cloud.securesourcemanager.v1.PullRequestComment.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of pull request comments created.
* </pre>
*
* <code>
* repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1;
* </code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder>
getPullRequestCommentsBuilderList() {
return getPullRequestCommentsFieldBuilder().getBuilderList();
}
    // Lazily creates the RepeatedFieldBuilderV3 on first use, handing it the current
    // list (and whether that list is already a private mutable copy). After this,
    // pullRequestComments_ is nulled: the field builder owns the data exclusively.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.securesourcemanager.v1.PullRequestComment,
            com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder,
            com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder>
        getPullRequestCommentsFieldBuilder() {
      if (pullRequestCommentsBuilder_ == null) {
        pullRequestCommentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.securesourcemanager.v1.PullRequestComment,
                com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder,
                com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder>(
                pullRequestComments_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        pullRequestComments_ = null;
      }
      return pullRequestCommentsBuilder_;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse)
  // Singleton default (all-fields-unset) instance, created eagerly at class load.
  private static final com.google.cloud.securesourcemanager.v1
          .BatchCreatePullRequestCommentsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse();
  }

  /** Returns the shared immutable default instance of this message type. */
  public static com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any parse failure the partially-built message is attached
  // to the thrown InvalidProtocolBufferException via setUnfinishedMessage, so callers
  // can inspect whatever was successfully decoded before the error.
  private static final com.google.protobuf.Parser<BatchCreatePullRequestCommentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<BatchCreatePullRequestCommentsResponse>() {
        @java.lang.Override
        public BatchCreatePullRequestCommentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser contract (IPBE only) holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the static parser for this message type. */
  public static com.google.protobuf.Parser<BatchCreatePullRequestCommentsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BatchCreatePullRequestCommentsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.BatchCreatePullRequestCommentsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary (concatenation artifact repaired) ====
// Next file: java-gkehub/proto-google-cloud-gkehub-v1beta/src/main/java/com/google/cloud/gkehub/policycontroller/v1beta/ResourceRequirements.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/policycontroller/v1beta/policycontroller.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.policycontroller.v1beta;
/**
*
*
* <pre>
* ResourceRequirements describes the compute resource requirements.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements}
*/
public final class ResourceRequirements extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements)
ResourceRequirementsOrBuilder {
private static final long serialVersionUID = 0L;
// Use ResourceRequirements.newBuilder() to construct.
private ResourceRequirements(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ResourceRequirements() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ResourceRequirements();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.policycontroller.v1beta.PolicyControllerProto
.internal_static_google_cloud_gkehub_policycontroller_v1beta_ResourceRequirements_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.policycontroller.v1beta.PolicyControllerProto
.internal_static_google_cloud_gkehub_policycontroller_v1beta_ResourceRequirements_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements.class,
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements.Builder.class);
}
private int bitField0_;
public static final int LIMITS_FIELD_NUMBER = 1;
private com.google.cloud.gkehub.policycontroller.v1beta.ResourceList limits_;
/**
*
*
* <pre>
* Limits describes the maximum amount of compute resources allowed for use by
* the running container.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
*
* @return Whether the limits field is set.
*/
@java.lang.Override
public boolean hasLimits() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Limits describes the maximum amount of compute resources allowed for use by
* the running container.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
*
* @return The limits.
*/
@java.lang.Override
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceList getLimits() {
return limits_ == null
? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
: limits_;
}
/**
*
*
* <pre>
* Limits describes the maximum amount of compute resources allowed for use by
* the running container.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
*/
@java.lang.Override
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder
getLimitsOrBuilder() {
return limits_ == null
? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
: limits_;
}
public static final int REQUESTS_FIELD_NUMBER = 2;
private com.google.cloud.gkehub.policycontroller.v1beta.ResourceList requests_;
/**
*
*
* <pre>
* Requests describes the amount of compute resources reserved for the
* container by the kube-scheduler.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
*
* @return Whether the requests field is set.
*/
@java.lang.Override
public boolean hasRequests() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Requests describes the amount of compute resources reserved for the
* container by the kube-scheduler.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
*
* @return The requests.
*/
@java.lang.Override
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceList getRequests() {
return requests_ == null
? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
: requests_;
}
/**
*
*
* <pre>
* Requests describes the amount of compute resources reserved for the
* container by the kube-scheduler.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
*/
@java.lang.Override
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder
getRequestsOrBuilder() {
return requests_ == null
? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
: requests_;
}
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // Proto3 message with no required fields: always initialized once computed.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields (per bitField0_ presence bits) followed by any
  // unknown fields, preserving round-trip fidelity.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getLimits());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getRequests());
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the serialized byte size.
  // Safe because the message is immutable after construction.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getLimits());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getRequests());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: both messages must agree on field presence, field values,
  // and unknown fields. Consistent with hashCode() below.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements other =
        (com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements) obj;

    if (hasLimits() != other.hasLimits()) return false;
    if (hasLimits()) {
      if (!getLimits().equals(other.getLimits())) return false;
    }
    if (hasRequests() != other.hasRequests()) return false;
    if (hasRequests()) {
      if (!getRequests().equals(other.getRequests())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash over descriptor plus each set field, memoized in memoizedHashCode
  // (safe: the message is immutable). Equal messages hash equally.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasLimits()) {
      hash = (37 * hash) + LIMITS_FIELD_NUMBER;
      hash = (53 * hash) + getLimits().hashCode();
    }
    if (hasRequests()) {
      hash = (37 * hash) + REQUESTS_FIELD_NUMBER;
      hash = (53 * hash) + getRequests().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* ResourceRequirements describes the compute resource requirements.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements)
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirementsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.policycontroller.v1beta.PolicyControllerProto
.internal_static_google_cloud_gkehub_policycontroller_v1beta_ResourceRequirements_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.policycontroller.v1beta.PolicyControllerProto
.internal_static_google_cloud_gkehub_policycontroller_v1beta_ResourceRequirements_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements.class,
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements.Builder.class);
}
// Construct using
// com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getLimitsFieldBuilder();
getRequestsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
limits_ = null;
if (limitsBuilder_ != null) {
limitsBuilder_.dispose();
limitsBuilder_ = null;
}
requests_ = null;
if (requestsBuilder_ != null) {
requestsBuilder_.dispose();
requestsBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.policycontroller.v1beta.PolicyControllerProto
.internal_static_google_cloud_gkehub_policycontroller_v1beta_ResourceRequirements_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
getDefaultInstanceForType() {
return com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements build() {
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements buildPartial() {
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements result =
new com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies each set field from the builder into the result message, preferring
    // the nested field builder's built value when one exists, and transfers the
    // corresponding presence bits into result.bitField0_.
    private void buildPartial0(
        com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.limits_ = limitsBuilder_ == null ? limits_ : limitsBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.requests_ = requestsBuilder_ == null ? requests_ : requestsBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements) {
return mergeFrom(
(com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements other) {
if (other
== com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
.getDefaultInstance()) return this;
if (other.hasLimits()) {
mergeLimits(other.getLimits());
}
if (other.hasRequests()) {
mergeRequests(other.getRequests());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getLimitsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getRequestsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.gkehub.policycontroller.v1beta.ResourceList limits_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.policycontroller.v1beta.ResourceList,
com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder,
com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder>
limitsBuilder_;
/**
*
*
* <pre>
* Limits describes the maximum amount of compute resources allowed for use by
* the running container.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
*
* @return Whether the limits field is set.
*/
public boolean hasLimits() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Limits describes the maximum amount of compute resources allowed for use by
* the running container.
* </pre>
*
* <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
*
* @return The limits.
*/
public com.google.cloud.gkehub.policycontroller.v1beta.ResourceList getLimits() {
if (limitsBuilder_ == null) {
return limits_ == null
? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
: limits_;
} else {
return limitsBuilder_.getMessage();
}
}
    /**
     *
     *
     * <pre>
     * Limits describes the maximum amount of compute resources allowed for use by
     * the running container.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
     */
    public Builder setLimits(com.google.cloud.gkehub.policycontroller.v1beta.ResourceList value) {
      if (limitsBuilder_ == null) {
        // Null is rejected; to unset the field use clearLimits() instead.
        if (value == null) {
          throw new NullPointerException();
        }
        limits_ = value;
      } else {
        limitsBuilder_.setMessage(value);
      }
      // Mark the field as explicitly set and notify parent builders.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Limits describes the maximum amount of compute resources allowed for use by
     * the running container.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
     */
    public Builder setLimits(
        com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder builderForValue) {
      // Overload taking a builder: the value is built eagerly here.
      if (limitsBuilder_ == null) {
        limits_ = builderForValue.build();
      } else {
        limitsBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Limits describes the maximum amount of compute resources allowed for use by
     * the running container.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
     */
    public Builder mergeLimits(com.google.cloud.gkehub.policycontroller.v1beta.ResourceList value) {
      if (limitsBuilder_ == null) {
        // Proto merge semantics: merge field-by-field only when a non-default
        // value is already present; otherwise simply adopt the new value.
        if (((bitField0_ & 0x00000001) != 0)
            && limits_ != null
            && limits_
                != com.google.cloud.gkehub.policycontroller.v1beta.ResourceList
                    .getDefaultInstance()) {
          getLimitsBuilder().mergeFrom(value);
        } else {
          limits_ = value;
        }
      } else {
        limitsBuilder_.mergeFrom(value);
      }
      if (limits_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Limits describes the maximum amount of compute resources allowed for use by
     * the running container.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
     */
    public Builder clearLimits() {
      // Clear the presence bit, drop the cached value, and dispose of the
      // nested builder so a later get/set starts from a clean state.
      bitField0_ = (bitField0_ & ~0x00000001);
      limits_ = null;
      if (limitsBuilder_ != null) {
        limitsBuilder_.dispose();
        limitsBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Limits describes the maximum amount of compute resources allowed for use by
     * the running container.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
     */
    public com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder getLimitsBuilder() {
      // Handing out a mutable sub-builder implies the field will be set.
      bitField0_ |= 0x00000001;
      onChanged();
      return getLimitsFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Limits describes the maximum amount of compute resources allowed for use by
     * the running container.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
     */
    public com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder
        getLimitsOrBuilder() {
      // Read-only view: prefer the live builder, else the cached value or default.
      if (limitsBuilder_ != null) {
        return limitsBuilder_.getMessageOrBuilder();
      } else {
        return limits_ == null
            ? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
            : limits_;
      }
    }
    /**
     *
     *
     * <pre>
     * Limits describes the maximum amount of compute resources allowed for use by
     * the running container.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList limits = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceList,
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder,
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder>
        getLimitsFieldBuilder() {
      // Lazily create the nested builder, seeding it with the current value;
      // from then on limits_ is null and the builder owns the field state.
      if (limitsBuilder_ == null) {
        limitsBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.policycontroller.v1beta.ResourceList,
                com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder,
                com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder>(
                getLimits(), getParentForChildren(), isClean());
        limits_ = null;
      }
      return limitsBuilder_;
    }
    // Cached `requests` value; authoritative only while requestsBuilder_ is null.
    private com.google.cloud.gkehub.policycontroller.v1beta.ResourceList requests_;
    // Lazily-created nested builder for `requests` (see getRequestsFieldBuilder()).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceList,
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder,
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder>
        requestsBuilder_;
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     *
     * @return Whether the requests field is set.
     */
    public boolean hasRequests() {
      // Bit 1 of bitField0_ tracks explicit presence of `requests`.
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     *
     * @return The requests.
     */
    public com.google.cloud.gkehub.policycontroller.v1beta.ResourceList getRequests() {
      // The nested builder, once created, is the source of truth; otherwise
      // fall back to the cached field (never returning null).
      if (requestsBuilder_ == null) {
        return requests_ == null
            ? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
            : requests_;
      } else {
        return requestsBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     */
    public Builder setRequests(com.google.cloud.gkehub.policycontroller.v1beta.ResourceList value) {
      if (requestsBuilder_ == null) {
        // Null is rejected; to unset the field use clearRequests() instead.
        if (value == null) {
          throw new NullPointerException();
        }
        requests_ = value;
      } else {
        requestsBuilder_.setMessage(value);
      }
      // Mark the field as explicitly set and notify parent builders.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     */
    public Builder setRequests(
        com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder builderForValue) {
      // Overload taking a builder: the value is built eagerly here.
      if (requestsBuilder_ == null) {
        requests_ = builderForValue.build();
      } else {
        requestsBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     */
    public Builder mergeRequests(
        com.google.cloud.gkehub.policycontroller.v1beta.ResourceList value) {
      if (requestsBuilder_ == null) {
        // Proto merge semantics: merge field-by-field only when a non-default
        // value is already present; otherwise simply adopt the new value.
        if (((bitField0_ & 0x00000002) != 0)
            && requests_ != null
            && requests_
                != com.google.cloud.gkehub.policycontroller.v1beta.ResourceList
                    .getDefaultInstance()) {
          getRequestsBuilder().mergeFrom(value);
        } else {
          requests_ = value;
        }
      } else {
        requestsBuilder_.mergeFrom(value);
      }
      if (requests_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     */
    public Builder clearRequests() {
      // Clear the presence bit, drop the cached value, and dispose of the
      // nested builder so a later get/set starts from a clean state.
      bitField0_ = (bitField0_ & ~0x00000002);
      requests_ = null;
      if (requestsBuilder_ != null) {
        requestsBuilder_.dispose();
        requestsBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     */
    public com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder
        getRequestsBuilder() {
      // Handing out a mutable sub-builder implies the field will be set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getRequestsFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     */
    public com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder
        getRequestsOrBuilder() {
      // Read-only view: prefer the live builder, else the cached value or default.
      if (requestsBuilder_ != null) {
        return requestsBuilder_.getMessageOrBuilder();
      } else {
        return requests_ == null
            ? com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.getDefaultInstance()
            : requests_;
      }
    }
    /**
     *
     *
     * <pre>
     * Requests describes the amount of compute resources reserved for the
     * container by the kube-scheduler.
     * </pre>
     *
     * <code>optional .google.cloud.gkehub.policycontroller.v1beta.ResourceList requests = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceList,
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder,
            com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder>
        getRequestsFieldBuilder() {
      // Lazily create the nested builder, seeding it with the current value;
      // from then on requests_ is null and the builder owns the field state.
      if (requestsBuilder_ == null) {
        requestsBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.policycontroller.v1beta.ResourceList,
                com.google.cloud.gkehub.policycontroller.v1beta.ResourceList.Builder,
                com.google.cloud.gkehub.policycontroller.v1beta.ResourceListOrBuilder>(
                getRequests(), getParentForChildren(), isClean());
        requests_ = null;
      }
      return requestsBuilder_;
    }
    // Unknown fields are delegated unchanged to the GeneratedMessageV3.Builder
    // base class so unrecognized wire data survives a parse/serialize round trip.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements)
  // Singleton default instance shared by all readers; created once at class
  // initialization and returned for any unset message-typed field.
  private static final com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements();
  }
  // Returns the immutable singleton default instance of ResourceRequirements.
  public static com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure it attaches the partially-built message
  // to the thrown InvalidProtocolBufferException so callers can inspect what
  // was decoded before the error.
  private static final com.google.protobuf.Parser<ResourceRequirements> PARSER =
      new com.google.protobuf.AbstractParser<ResourceRequirements>() {
        @java.lang.Override
        public ResourceRequirements parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Normalize missing-required-field errors to the protobuf exception type.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared wire-format parser.
  public static com.google.protobuf.Parser<ResourceRequirements> parser() {
    return PARSER;
  }
  // Instance-level accessor required by the Message interface; same parser.
  @java.lang.Override
  public com.google.protobuf.Parser<ResourceRequirements> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the singleton default instance.
  @java.lang.Override
  public com.google.cloud.gkehub.policycontroller.v1beta.ResourceRequirements
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 34,886 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ToolProto.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/tool.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
public final class ToolProto {
  // Non-instantiable holder class: only static descriptor state lives here.
  private ToolProto() {}
  // tool.proto declares no extensions, so registration is a no-op.
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    // Delegate to the lite overload; both are no-ops for this file.
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }
  // Message descriptors and reflection-based field accessor tables for every
  // message (and nested message) declared in google/cloud/aiplatform/v1/tool.proto.
  // All of these are assigned exactly once in this class's static initializer.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_Tool_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_Tool_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_Tool_GoogleSearch_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_Tool_GoogleSearch_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_Tool_CodeExecution_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_Tool_CodeExecution_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_Tool_ComputerUse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_Tool_ComputerUse_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_UrlContext_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_UrlContext_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_FunctionDeclaration_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_FunctionDeclaration_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_FunctionCall_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_FunctionCall_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_FunctionResponse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_FunctionResponse_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_ExecutableCode_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_ExecutableCode_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_CodeExecutionResult_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_CodeExecutionResult_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_Retrieval_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_Retrieval_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_VertexRagStore_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_VertexRagStore_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_VertexRagStore_RagResource_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_VertexRagStore_RagResource_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_VertexAISearch_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_VertexAISearch_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_VertexAISearch_DataStoreSpec_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_VertexAISearch_DataStoreSpec_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_GoogleSearchRetrieval_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_GoogleSearchRetrieval_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_GoogleMaps_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_GoogleMaps_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_EnterpriseWebSearch_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_EnterpriseWebSearch_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_DynamicRetrievalConfig_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_DynamicRetrievalConfig_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_ToolConfig_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_ToolConfig_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_RetrievalConfig_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_RetrievalConfig_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Filter_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Filter_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_RankService_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_RankService_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_LlmRanker_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_LlmRanker_fieldAccessorTable;
  // Returns the file descriptor for tool.proto (built in the static initializer).
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }
  // Backing field for getDescriptor(); assigned once in the static initializer.
  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n"
+ "%google/cloud/aiplatform/v1/tool.proto\022"
+ "\032google.cloud.aiplatform.v1\032\037google/api/"
+ "field_behavior.proto\032\031google/api/resourc"
+ "e.proto\032(google/cloud/aiplatform/v1/open"
+ "api.proto\032\034google/protobuf/struct.proto\032\030google/type/latlng.proto\"\363"
+ "\t\n"
+ "\004Tool\022S\n"
+ "\025function_declarations\030\001 \003(\0132/.google.cloud"
+ ".aiplatform.v1.FunctionDeclarationB\003\340A\001\022=\n"
+ "\tretrieval\030\002"
+ " \001(\0132%.google.cloud.aiplatform.v1.RetrievalB\003\340A\001\022I\n\r"
+ "google_search\030\007"
+ " \001(\0132-.google.cloud.aiplatform.v1.Tool.GoogleSearchB\003\340A\001\022W\n"
+ "\027google_search_retrieval\030\003"
+ " \001(\01321.google.cloud.aiplatform.v1.GoogleSearchRetrievalB\003\340A\001\022@\n"
+ "\013google_maps\030\005"
+ " \001(\0132&.google.cloud.aiplatform.v1.GoogleMapsB\003\340A\001\022S\n"
+ "\025enterprise_web_search\030\006 "
+ "\001(\0132/.google.cloud.aiplatform.v1.EnterpriseWebSearchB\003\340A\001\022K\n"
+ "\016code_execution\030\004 \001("
+ "\0132..google.cloud.aiplatform.v1.Tool.CodeExecutionB\003\340A\001\022@\n"
+ "\013url_context\030\010 \001(\0132&.go"
+ "ogle.cloud.aiplatform.v1.UrlContextB\003\340A\001\022G\n"
+ "\014computer_use\030\013"
+ " \001(\0132,.google.cloud.aiplatform.v1.Tool.ComputerUseB\003\340A\001\032\241\001\n"
+ "\014GoogleSearch\022\034\n"
+ "\017exclude_domains\030\003 \003(\tB\003\340A\001\022[\n"
+ "\023blocking_confidence\030\004 \001(\01624.google.c"
+ "loud.aiplatform.v1.Tool.PhishBlockThresholdB\003\340A\001H\000\210\001\001B\026\n"
+ "\024_blocking_confidence\032\017\n"
+ "\r"
+ "CodeExecution\032\246\001\n"
+ "\013ComputerUse\022R\n"
+ "\013environment\030\001"
+ " \001(\01628.google.cloud.aiplatform.v1.Tool.ComputerUse.EnvironmentB\003\340A\002\"C\n"
+ "\013Environment\022\033\n"
+ "\027ENVIRONMENT_UNSPECIFIED\020\000\022\027\n"
+ "\023ENVIRONMENT_BROWSER\020\001\"\345\001\n"
+ "\023PhishBlockThreshold\022%\n"
+ "!PHISH_BLOCK_THRESHOLD_UNSPECIFIED\020\000\022\027\n"
+ "\023BLOCK_LOW_AND_ABOVE\020\036\022\032\n"
+ "\026BLOCK_MEDIUM_AND_ABOVE\020(\022\030\n"
+ "\024BLOCK_HIGH_AND_ABOVE\0202\022\032\n"
+ "\026BLOCK_HIGHER_AND_ABOVE\0207\022\035\n"
+ "\031BLOCK_VERY_HIGH_AND_ABOVE\020<\022\035\n"
+ "\031BLOCK_ONLY_EXTREMELY_HIGH\020d\"\014\n\n"
+ "UrlContext\"\262\002\n"
+ "\023FunctionDeclaration\022\021\n"
+ "\004name\030\001 \001(\tB\003\340A\002\022\030\n"
+ "\013description\030\002 \001(\tB\003\340A\001\022;\n\n"
+ "parameters\030\003 \001(\0132\".google.cloud.aiplatform.v1.SchemaB\003\340A\001\022;\n"
+ "\026parameters_json_schema\030\005"
+ " \001(\0132\026.google.protobuf.ValueB\003\340A\001\0229\n"
+ "\010response\030\004 \001(\0132\""
+ ".google.cloud.aiplatform.v1.SchemaB\003\340A\001\0229\n"
+ "\024response_json_schema\030\006"
+ " \001(\0132\026.google.protobuf.ValueB\003\340A\001\"M\n"
+ "\014FunctionCall\022\021\n"
+ "\004name\030\001 \001(\tB\003\340A\002\022*\n"
+ "\004args\030\002 \001(\0132\027.google.protobuf.StructB\003\340A\001\"U\n"
+ "\020FunctionResponse\022\021\n"
+ "\004name\030\001 \001(\tB\003\340A\002\022.\n"
+ "\010response\030\002 \001(\0132\027.google.protobuf.StructB\003\340A\002\"\241\001\n"
+ "\016ExecutableCode\022J\n"
+ "\010language\030\001 \001(\01623.google.cloud.aip"
+ "latform.v1.ExecutableCode.LanguageB\003\340A\002\022\021\n"
+ "\004code\030\002 \001(\tB\003\340A\002\"0\n"
+ "\010Language\022\030\n"
+ "\024LANGUAGE_UNSPECIFIED\020\000\022\n\n"
+ "\006PYTHON\020\001\"\340\001\n"
+ "\023CodeExecutionResult\022M\n"
+ "\007outcome\030\001 \001(\01627.google.c"
+ "loud.aiplatform.v1.CodeExecutionResult.OutcomeB\003\340A\002\022\023\n"
+ "\006output\030\002 \001(\tB\003\340A\001\"e\n"
+ "\007Outcome\022\027\n"
+ "\023OUTCOME_UNSPECIFIED\020\000\022\016\n\n"
+ "OUTCOME_OK\020\001\022\022\n"
+ "\016OUTCOME_FAILED\020\002\022\035\n"
+ "\031OUTCOME_DEADLINE_EXCEEDED\020\003\"\311\001\n"
+ "\tRetrieval\022F\n"
+ "\020vertex_ai_search\030\002"
+ " \001(\0132*.google.cloud.aiplatform.v1.VertexAISearchH\000\022F\n"
+ "\020vertex_rag_store\030\004"
+ " \001(\0132*.google.cloud.aiplatform.v1.VertexRagStoreH\000\022\"\n"
+ "\023disable_attribution\030\003 \001(\010B\005\030\001\340A\001B\010\n"
+ "\006source\"\252\003\n"
+ "\016VertexRagStore\022R\n\r"
+ "rag_resources\030\004 \003(\01326.google.cloud.aip"
+ "latform.v1.VertexRagStore.RagResourceB\003\340A\001\022$\n"
+ "\020similarity_top_k\030\002 \001(\005B\005\030\001\340A\001H\000\210\001\001\022-\n"
+ "\031vector_distance_threshold\030\003"
+ " \001(\001B\005\030\001\340A\001H\001\210\001\001\022Q\n"
+ "\024rag_retrieval_config\030\006 \001(\0132.."
+ "google.cloud.aiplatform.v1.RagRetrievalConfigB\003\340A\001\032i\n"
+ "\013RagResource\022?\n\n"
+ "rag_corpus\030\001 \001(\tB+\340A\001\372A%\n"
+ "#aiplatform.googleapis.com/RagCorpus\022\031\n"
+ "\014rag_file_ids\030\002 \003(\tB\003\340A\001B\023\n"
+ "\021_similarity_top_kB\034\n"
+ "\032_vector_distance_threshold\"\372\001\n"
+ "\016VertexAISearch\022\026\n"
+ "\tdatastore\030\001 \001(\tB\003\340A\001\022\023\n"
+ "\006engine\030\002 \001(\tB\003\340A\001\022\030\n"
+ "\013max_results\030\003 \001(\005B\003\340A\001\022\023\n"
+ "\006filter\030\004 \001(\tB\003\340A\001\022R\n"
+ "\020data_store_specs\030\005 \003(\01328.google.cloud"
+ ".aiplatform.v1.VertexAISearch.DataStoreSpec\0328\n\r"
+ "DataStoreSpec\022\022\n\n"
+ "data_store\030\001 \001(\t\022\023\n"
+ "\006filter\030\002 \001(\tB\003\340A\001\"m\n"
+ "\025GoogleSearchRetrieval\022T\n"
+ "\030dynamic_retrieval_config\030\002 \001(\013"
+ "22.google.cloud.aiplatform.v1.DynamicRetrievalConfig\"(\n\n"
+ "GoogleMaps\022\032\n\r"
+ "enable_widget\030\001 \001(\010B\003\340A\001\"\250\001\n"
+ "\023EnterpriseWebSearch\022\034\n"
+ "\017exclude_domains\030\001 \003(\tB\003\340A\001\022[\n"
+ "\023blocking_confidence\030\002 \001(\01624.google.cloud.aiplatf"
+ "orm.v1.Tool.PhishBlockThresholdB\003\340A\001H\000\210\001\001B\026\n"
+ "\024_blocking_confidence\"\312\001\n"
+ "\026DynamicRetrievalConfig\022E\n"
+ "\004mode\030\001 \001(\01627.google.clou"
+ "d.aiplatform.v1.DynamicRetrievalConfig.Mode\022#\n"
+ "\021dynamic_threshold\030\002 \001(\002B\003\340A\001H\000\210\001\001\".\n"
+ "\004Mode\022\024\n"
+ "\020MODE_UNSPECIFIED\020\000\022\020\n"
+ "\014MODE_DYNAMIC\020\001B\024\n"
+ "\022_dynamic_threshold\"\261\001\n\n"
+ "ToolConfig\022W\n"
+ "\027function_calling_config\030\001 \001(\01321"
+ ".google.cloud.aiplatform.v1.FunctionCallingConfigB\003\340A\001\022J\n"
+ "\020retrieval_config\030\002 \001(\013"
+ "2+.google.cloud.aiplatform.v1.RetrievalConfigB\003\340A\001\"\302\001\n"
+ "\025FunctionCallingConfig\022I\n"
+ "\004mode\030\001"
+ " \001(\01626.google.cloud.aiplatform.v1.FunctionCallingConfig.ModeB\003\340A\001\022#\n"
+ "\026allowed_function_names\030\002 \003(\tB\003\340A\001\"9\n"
+ "\004Mode\022\024\n"
+ "\020MODE_UNSPECIFIED\020\000\022\010\n"
+ "\004AUTO\020\001\022\007\n"
+ "\003ANY\020\002\022\010\n"
+ "\004NONE\020\003\"v\n"
+ "\017RetrievalConfig\022)\n"
+ "\007lat_lng\030\001 \001(\0132\023.google.type.LatLngH\000\210\001\001\022\032\n\r"
+ "language_code\030\002 \001(\tH\001\210\001\001B\n\n"
+ "\010_lat_lngB\020\n"
+ "\016_language_code\"\252\005\n"
+ "\022RagRetrievalConfig\022\022\n"
+ "\005top_k\030\001 \001(\005B\003\340A\001\022J\n"
+ "\006filter\030\003 \001(\01325.google.clou"
+ "d.aiplatform.v1.RagRetrievalConfig.FilterB\003\340A\001\022L\n"
+ "\007ranking\030\004 \001(\01326.google.cloud.a"
+ "iplatform.v1.RagRetrievalConfig.RankingB\003\340A\001\032\223\001\n"
+ "\006Filter\022(\n"
+ "\031vector_distance_threshold\030\003 \001(\001B\003\340A\001H\000\022*\n"
+ "\033vector_similarity_threshold\030\004 \001(\001B\003\340A\001H\000\022\034\n"
+ "\017metadata_filter\030\002 \001(\tB\003\340A\001B\025\n"
+ "\023vector_db_threshold\032\317\002\n"
+ "\007Ranking\022_\n"
+ "\014rank_service\030\001 \001(\0132B.google.cl"
+ "oud.aiplatform.v1.RagRetrievalConfig.Ranking.RankServiceB\003\340A\001H\000\022[\n\n"
+ "llm_ranker\030\003 "
+ "\001(\0132@.google.cloud.aiplatform.v1.RagRetrievalConfig.Ranking.LlmRankerB\003\340A\001H\000\032:\n"
+ "\013RankService\022\034\n\n"
+ "model_name\030\001 \001(\tB\003\340A\001H\000\210\001\001B\r\n"
+ "\013_model_name\0328\n"
+ "\tLlmRanker\022\034\n\n"
+ "model_name\030\001 \001(\tB\003\340A\001H\000\210\001\001B\r\n"
+ "\013_model_nameB\020\n"
+ "\016ranking_configB\307\001\n"
+ "\036com.google.cloud.aiplatform.v1B\tToolProtoP\001Z>cloud.google.com/g"
+ "o/aiplatform/apiv1/aiplatformpb;aiplatfo"
+ "rmpb\252\002\032Google.Cloud.AIPlatform.V1\312\002\032Goog"
+ "le\\Cloud\\AIPlatform\\V1\352\002\035Google::Cloud::AIPlatform::V1b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1.OpenApiProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
com.google.type.LatLngProto.getDescriptor(),
});
internal_static_google_cloud_aiplatform_v1_Tool_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_aiplatform_v1_Tool_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_Tool_descriptor,
new java.lang.String[] {
"FunctionDeclarations",
"Retrieval",
"GoogleSearch",
"GoogleSearchRetrieval",
"GoogleMaps",
"EnterpriseWebSearch",
"CodeExecution",
"UrlContext",
"ComputerUse",
});
internal_static_google_cloud_aiplatform_v1_Tool_GoogleSearch_descriptor =
internal_static_google_cloud_aiplatform_v1_Tool_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_aiplatform_v1_Tool_GoogleSearch_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_Tool_GoogleSearch_descriptor,
new java.lang.String[] {
"ExcludeDomains", "BlockingConfidence",
});
internal_static_google_cloud_aiplatform_v1_Tool_CodeExecution_descriptor =
internal_static_google_cloud_aiplatform_v1_Tool_descriptor.getNestedTypes().get(1);
internal_static_google_cloud_aiplatform_v1_Tool_CodeExecution_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_Tool_CodeExecution_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_aiplatform_v1_Tool_ComputerUse_descriptor =
internal_static_google_cloud_aiplatform_v1_Tool_descriptor.getNestedTypes().get(2);
internal_static_google_cloud_aiplatform_v1_Tool_ComputerUse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_Tool_ComputerUse_descriptor,
new java.lang.String[] {
"Environment",
});
internal_static_google_cloud_aiplatform_v1_UrlContext_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_aiplatform_v1_UrlContext_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_UrlContext_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_aiplatform_v1_FunctionDeclaration_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_aiplatform_v1_FunctionDeclaration_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_FunctionDeclaration_descriptor,
new java.lang.String[] {
"Name",
"Description",
"Parameters",
"ParametersJsonSchema",
"Response",
"ResponseJsonSchema",
});
internal_static_google_cloud_aiplatform_v1_FunctionCall_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_aiplatform_v1_FunctionCall_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_FunctionCall_descriptor,
new java.lang.String[] {
"Name", "Args",
});
internal_static_google_cloud_aiplatform_v1_FunctionResponse_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_aiplatform_v1_FunctionResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_FunctionResponse_descriptor,
new java.lang.String[] {
"Name", "Response",
});
internal_static_google_cloud_aiplatform_v1_ExecutableCode_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_aiplatform_v1_ExecutableCode_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_ExecutableCode_descriptor,
new java.lang.String[] {
"Language", "Code",
});
internal_static_google_cloud_aiplatform_v1_CodeExecutionResult_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_aiplatform_v1_CodeExecutionResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_CodeExecutionResult_descriptor,
new java.lang.String[] {
"Outcome", "Output",
});
internal_static_google_cloud_aiplatform_v1_Retrieval_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_aiplatform_v1_Retrieval_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_Retrieval_descriptor,
new java.lang.String[] {
"VertexAiSearch", "VertexRagStore", "DisableAttribution", "Source",
});
internal_static_google_cloud_aiplatform_v1_VertexRagStore_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_aiplatform_v1_VertexRagStore_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_VertexRagStore_descriptor,
new java.lang.String[] {
"RagResources", "SimilarityTopK", "VectorDistanceThreshold", "RagRetrievalConfig",
});
internal_static_google_cloud_aiplatform_v1_VertexRagStore_RagResource_descriptor =
internal_static_google_cloud_aiplatform_v1_VertexRagStore_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1_VertexRagStore_RagResource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_VertexRagStore_RagResource_descriptor,
new java.lang.String[] {
"RagCorpus", "RagFileIds",
});
internal_static_google_cloud_aiplatform_v1_VertexAISearch_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_aiplatform_v1_VertexAISearch_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_VertexAISearch_descriptor,
new java.lang.String[] {
"Datastore", "Engine", "MaxResults", "Filter", "DataStoreSpecs",
});
internal_static_google_cloud_aiplatform_v1_VertexAISearch_DataStoreSpec_descriptor =
internal_static_google_cloud_aiplatform_v1_VertexAISearch_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1_VertexAISearch_DataStoreSpec_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_VertexAISearch_DataStoreSpec_descriptor,
new java.lang.String[] {
"DataStore", "Filter",
});
internal_static_google_cloud_aiplatform_v1_GoogleSearchRetrieval_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_aiplatform_v1_GoogleSearchRetrieval_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_GoogleSearchRetrieval_descriptor,
new java.lang.String[] {
"DynamicRetrievalConfig",
});
internal_static_google_cloud_aiplatform_v1_GoogleMaps_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_cloud_aiplatform_v1_GoogleMaps_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_GoogleMaps_descriptor,
new java.lang.String[] {
"EnableWidget",
});
internal_static_google_cloud_aiplatform_v1_EnterpriseWebSearch_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_cloud_aiplatform_v1_EnterpriseWebSearch_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_EnterpriseWebSearch_descriptor,
new java.lang.String[] {
"ExcludeDomains", "BlockingConfidence",
});
internal_static_google_cloud_aiplatform_v1_DynamicRetrievalConfig_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_cloud_aiplatform_v1_DynamicRetrievalConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_DynamicRetrievalConfig_descriptor,
new java.lang.String[] {
"Mode", "DynamicThreshold",
});
internal_static_google_cloud_aiplatform_v1_ToolConfig_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_google_cloud_aiplatform_v1_ToolConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_ToolConfig_descriptor,
new java.lang.String[] {
"FunctionCallingConfig", "RetrievalConfig",
});
internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_descriptor,
new java.lang.String[] {
"Mode", "AllowedFunctionNames",
});
internal_static_google_cloud_aiplatform_v1_RetrievalConfig_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_google_cloud_aiplatform_v1_RetrievalConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_RetrievalConfig_descriptor,
new java.lang.String[] {
"LatLng", "LanguageCode",
});
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_descriptor,
new java.lang.String[] {
"TopK", "Filter", "Ranking",
});
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Filter_descriptor =
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Filter_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Filter_descriptor,
new java.lang.String[] {
"VectorDistanceThreshold",
"VectorSimilarityThreshold",
"MetadataFilter",
"VectorDbThreshold",
});
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_descriptor =
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_descriptor,
new java.lang.String[] {
"RankService", "LlmRanker", "RankingConfig",
});
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_RankService_descriptor =
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_RankService_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_RankService_descriptor,
new java.lang.String[] {
"ModelName",
});
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_LlmRanker_descriptor =
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_LlmRanker_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_RagRetrievalConfig_Ranking_LlmRanker_descriptor,
new java.lang.String[] {
"ModelName",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1.OpenApiProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
com.google.type.LatLngProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.data.input.kafkainput;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.apache.druid.data.input.ColumnsFilter;
import org.apache.druid.data.input.InputEntityReader;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.InputRowSchema;
import org.apache.druid.data.input.impl.CsvInputFormat;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JsonInputFormat;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.data.input.kafka.KafkaRecordEntity;
import org.apache.druid.indexing.common.TestUtils;
import org.apache.druid.indexing.seekablestream.SettableByteEntity;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.Optional;
public class KafkaInputFormatTest
{
  // Epoch millis for 2021-06-24: the Kafka record timestamp attached to every test record.
  private static final long TIMESTAMP_MILLIS = DateTimes.of("2021-06-24").getMillis();
  // Topic every test record belongs to; surfaced by the format as "kafka.newtopic.topic".
  private static final String TOPIC = "sample";
  // JSON record key: {"key": "sampleKey"}; surfaced as "kafka.newkey.key".
  private static final byte[] SIMPLE_JSON_KEY_BYTES = StringUtils.toUtf8(
      TestUtils.singleQuoteToStandardJson(
          "{'key': 'sampleKey'}"
      )
  );
  // JSON record payload. Note its "timestamp" field (2021-06-25) differs from the
  // Kafka record timestamp (2021-06-24), so tests can tell which one was used.
  private static final byte[] SIMPLE_JSON_VALUE_BYTES = StringUtils.toUtf8(
      TestUtils.singleQuoteToStandardJson(
          "{"
          + " 'timestamp': '2021-06-25',"
          + " 'bar': null,"
          + " 'foo': 'x',"
          + " 'baz': 4,"
          + " 'o': {'mg': 1}"
          + "}"
      )
  );

  // Two sample Kafka headers ("encoding" and "kafkapkc"); surfaced with the
  // "kafka.newheader." prefix as "kafka.newheader.encoding" / "kafka.newheader.kafkapkc".
  private static final Iterable<Header> SAMPLE_HEADERS = ImmutableList.of(
      new Header()
      {
        @Override
        public String key()
        {
          return "encoding";
        }

        @Override
        public byte[] value()
        {
          return "application/json".getBytes(StandardCharsets.UTF_8);
        }
      },
      new Header()
      {
        @Override
        public String key()
        {
          return "kafkapkc";
        }

        @Override
        public byte[] value()
        {
          return "pkc-bar".getBytes(StandardCharsets.UTF_8);
        }
      }
  );

  // Format under test; rebuilt before each test in setUp() (a few tests reassign it).
  private KafkaInputFormat format;
  @Before
  public void setUp()
  {
    // Standard format used by most tests: string headers, plain JSON keys, and a JSON
    // value format with flattening specs (root/path/jq variants, including the *2
    // variants that match nothing in the sample payload). The four trailing strings
    // are the header prefix and the key/timestamp/topic column names.
    format = new KafkaInputFormat(
        new KafkaStringHeaderFormat(null),
        // Key Format
        new JsonInputFormat(
            new JSONPathSpec(true, ImmutableList.of()),
            null,
            null,
            false,
            false
        ),
        // Value Format
        new JsonInputFormat(
            new JSONPathSpec(
                true,
                ImmutableList.of(
                    new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
                    new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
                    new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
                    new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
                )
            ),
            null,
            null,
            false,
            false
        ),
        "kafka.newheader.",
        "kafka.newkey.key",
        "kafka.newts.timestamp",
        "kafka.newtopic.topic"
    );
  }
  @Test
  public void testSerde() throws JsonProcessingException
  {
    // Build a second KafkaInputFormat configured identically to the one from setUp(),
    // then verify equals() and that both serialize to identical JSON bytes.
    final ObjectMapper mapper = new ObjectMapper();
    KafkaInputFormat kif = new KafkaInputFormat(
        new KafkaStringHeaderFormat(null),
        // Key Format
        new JsonInputFormat(
            new JSONPathSpec(true, ImmutableList.of()),
            null,
            null,
            false,
            false
        ),
        // Value Format
        new JsonInputFormat(
            new JSONPathSpec(
                true,
                ImmutableList.of(
                    new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
                    new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
                    new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
                    new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
                )
            ),
            null,
            null,
            false,
            false
        ),
        "kafka.newheader.",
        "kafka.newkey.key",
        "kafka.newts.timestamp",
        "kafka.newtopic.topic"
    );
    Assert.assertEquals(format, kif);

    // Equal objects must also produce byte-identical serialized forms.
    final byte[] formatBytes = mapper.writeValueAsBytes(format);
    final byte[] kifBytes = mapper.writeValueAsBytes(kif);
    Assert.assertArrayEquals(formatBytes, kifBytes);
  }
  @Test
  public void testWithHeaderKeyAndValue() throws IOException
  {
    // Happy path: a record with a JSON key, a JSON value, and both sample headers.
    Headers headers = new RecordHeaders(SAMPLE_HEADERS);
    KafkaRecordEntity inputEntity =
        makeInputEntity(SIMPLE_JSON_KEY_BYTES, SIMPLE_JSON_VALUE_BYTES, headers);

    final InputEntityReader reader = format.createReader(
        new InputRowSchema(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(
                DimensionsSpec.getDefaultSchemas(
                    ImmutableList.of(
                        "bar",
                        "foo",
                        "kafka.newheader.encoding",
                        "kafka.newheader.kafkapkc",
                        "kafka.newts.timestamp",
                        "kafka.newtopic.topic"
                    )
                )
            ),
            ColumnsFilter.all()
        ),
        newSettableByteEntity(inputEntity),
        null
    );

    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
      int numActualIterations = 0;
      while (iterator.hasNext()) {

        final InputRow row = iterator.next();
        // Declared dimensions come back exactly as listed in the DimensionsSpec.
        Assert.assertEquals(
            Arrays.asList(
                "bar",
                "foo",
                "kafka.newheader.encoding",
                "kafka.newheader.kafkapkc",
                "kafka.newts.timestamp",
                "kafka.newtopic.topic"
            ),
            row.getDimensions()
        );

        // Payload verifications
        // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
        // but test reading them anyway since it isn't technically illegal
        // Timestamp comes from the payload's "timestamp" field, not the record timestamp.
        Assert.assertEquals(DateTimes.of("2021-06-25"), row.getTimestamp());
        Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
        Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));

        verifyHeader(row);

        // Key verification
        Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));

        // Flattening specs that match nothing in the payload yield empty dimensions.
        Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
        Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
        Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());

        numActualIterations++;
      }

      Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
  }
@Test
//Headers cannot be null, so testing only no key use case!
public void testWithOutKey() throws IOException
{
Headers headers = new RecordHeaders(SAMPLE_HEADERS);
KafkaRecordEntity inputEntity = makeInputEntity(null, SIMPLE_JSON_VALUE_BYTES, headers);
final InputEntityReader reader = format.createReader(
new InputRowSchema(
new TimestampSpec("timestamp", "iso", null),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(
ImmutableList.of(
"bar",
"foo",
"kafka.newheader.encoding",
"kafka.newheader.kafkapkc",
"kafka.newts.timestamp",
"kafka.newtopic.topic"
)
)
),
ColumnsFilter.all()
),
newSettableByteEntity(inputEntity),
null
);
final int numExpectedIterations = 1;
try (CloseableIterator<InputRow> iterator = reader.read()) {
int numActualIterations = 0;
while (iterator.hasNext()) {
final InputRow row = iterator.next();
// Key verification
Assert.assertTrue(row.getDimension("kafka.newkey.key").isEmpty());
numActualIterations++;
}
Assert.assertEquals(numExpectedIterations, numActualIterations);
}
}
@Test
public void testTimestampFromHeader() throws IOException
{
final Iterable<Header> sampleHeaderWithTs = Iterables.unmodifiableIterable(
Iterables.concat(
SAMPLE_HEADERS,
ImmutableList.of(
new Header()
{
@Override
public String key()
{
return "headerTs";
}
@Override
public byte[] value()
{
return "2021-06-24".getBytes(StandardCharsets.UTF_8);
}
}
)
)
);
Headers headers = new RecordHeaders(sampleHeaderWithTs);
KafkaRecordEntity inputEntity =
makeInputEntity(SIMPLE_JSON_KEY_BYTES, SIMPLE_JSON_VALUE_BYTES, headers);
final InputEntityReader reader = format.createReader(
new InputRowSchema(
new TimestampSpec("kafka.newheader.headerTs", "iso", null),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(
ImmutableList.of(
"bar",
"foo",
"kafka.newheader.encoding",
"kafka.newheader.kafkapkc"
)
)
),
ColumnsFilter.all()
),
newSettableByteEntity(inputEntity),
null
);
final int numExpectedIterations = 1;
try (CloseableIterator<InputRow> iterator = reader.read()) {
int numActualIterations = 0;
while (iterator.hasNext()) {
final InputRow row = iterator.next();
// Payload verifications
// this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
// but test reading them anyway since it isn't technically illegal
Assert.assertEquals(DateTimes.of("2021-06-24"), row.getTimestamp());
Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
verifyHeader(row);
// Key verification
Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));
Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
numActualIterations++;
}
Assert.assertEquals(numExpectedIterations, numActualIterations);
}
}
  @Test
  public void testWithOutKeyAndHeaderSpecs() throws IOException
  {
    // When the header format and key format are both null, only the value format is
    // applied: header columns and the key column must not appear in the rows.
    Headers headers = new RecordHeaders(SAMPLE_HEADERS);
    KafkaRecordEntity inputEntity =
        makeInputEntity(null, SIMPLE_JSON_VALUE_BYTES, headers);

    KafkaInputFormat localFormat = new KafkaInputFormat(
        null,
        null,
        // Value Format
        new JsonInputFormat(
            new JSONPathSpec(
                true,
                ImmutableList.of(
                    new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
                    new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
                    new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
                    new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
                )
            ),
            null,
            null,
            false,
            false
        ),
        "kafka.newheader.", "kafka.newkey.", "kafka.newts.", "kafka.newtopic."
    );

    final InputEntityReader reader = localFormat.createReader(
        new InputRowSchema(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(
                DimensionsSpec.getDefaultSchemas(
                    ImmutableList.of(
                        "bar",
                        "foo",
                        "kafka.newts.timestamp",
                        "kafka.newtopic.topic"
                    )
                )
            ),
            ColumnsFilter.all()
        ),
        newSettableByteEntity(inputEntity),
        null
    );

    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
      int numActualIterations = 0;
      while (iterator.hasNext()) {

        final InputRow row = iterator.next();

        // Key verification
        // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
        // but test reading them anyway since it isn't technically illegal
        // No key format configured, so the key column is empty.
        Assert.assertTrue(row.getDimension("kafka.newkey.key").isEmpty());
        Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
        Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));

        numActualIterations++;
      }

      Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
  }
@Test
public void testWithMultipleMixedRecords() throws IOException
{
final byte[][] keys = new byte[5][];
final byte[][] values = new byte[5][];
for (int i = 0; i < keys.length; i++) {
keys[i] = StringUtils.toUtf8(
"{\n"
+ " \"key\": \"sampleKey-" + i + "\"\n"
+ "}"
);
}
keys[2] = null;
for (int i = 0; i < values.length; i++) {
values[i] = StringUtils.toUtf8(
"{\n"
+ " \"timestamp\": \"2021-06-2" + i + "\",\n"
+ " \"bar\": null,\n"
+ " \"foo\": \"x\",\n"
+ " \"baz\": 4,\n"
+ " \"index\": " + i + ",\n"
+ " \"o\": {\n"
+ " \"mg\": 1\n"
+ " }\n"
+ "}"
);
}
Headers headers = new RecordHeaders(SAMPLE_HEADERS);
SettableByteEntity<KafkaRecordEntity> settableByteEntity = new SettableByteEntity<>();
final InputEntityReader reader = format.createReader(
new InputRowSchema(
new TimestampSpec("timestamp", "iso", null),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(
ImmutableList.of(
"bar",
"foo",
"kafka.newheader.encoding",
"kafka.newheader.kafkapkc",
"kafka.newts.timestamp",
"kafka.newtopic.topic"
)
)
),
ColumnsFilter.all()
),
settableByteEntity,
null
);
for (int i = 0; i < keys.length; i++) {
headers = headers.add(new RecordHeader("indexH", String.valueOf(i).getBytes(StandardCharsets.UTF_8)));
KafkaRecordEntity inputEntity = makeInputEntity(keys[i], values[i], headers);
settableByteEntity.setEntity(inputEntity);
final int numExpectedIterations = 1;
try (CloseableIterator<InputRow> iterator = reader.read()) {
int numActualIterations = 0;
while (iterator.hasNext()) {
final InputRow row = iterator.next();
// Payload verification
// this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
// but test reading them anyway since it isn't technically illegal
Assert.assertEquals(DateTimes.of("2021-06-2" + i), row.getTimestamp());
Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
Assert.assertEquals(String.valueOf(i), Iterables.getOnlyElement(row.getDimension("index")));
// Header verification
Assert.assertEquals(
"application/json",
Iterables.getOnlyElement(row.getDimension("kafka.newheader.encoding"))
);
Assert.assertEquals("pkc-bar", Iterables.getOnlyElement(row.getDimension("kafka.newheader.kafkapkc")));
Assert.assertEquals(
String.valueOf(DateTimes.of("2021-06-24").getMillis()),
Iterables.getOnlyElement(row.getDimension("kafka.newts.timestamp"))
);
Assert.assertEquals(
TOPIC,
Iterables.getOnlyElement(row.getDimension("kafka.newtopic.topic"))
);
Assert.assertEquals(String.valueOf(i), Iterables.getOnlyElement(row.getDimension("kafka.newheader.indexH")));
// Key verification
if (i == 2) {
Assert.assertEquals(Collections.emptyList(), row.getDimension("kafka.newkey.key"));
} else {
Assert.assertEquals("sampleKey-" + i, Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));
}
Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
numActualIterations++;
}
Assert.assertEquals(numExpectedIterations, numActualIterations);
}
}
}
@Test
public void testMissingTimestampThrowsException() throws IOException
{
Headers headers = new RecordHeaders(SAMPLE_HEADERS);
KafkaRecordEntity inputEntity =
makeInputEntity(SIMPLE_JSON_KEY_BYTES, SIMPLE_JSON_VALUE_BYTES, headers);
final InputEntityReader reader = format.createReader(
new InputRowSchema(
new TimestampSpec("time", "iso", null),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(
ImmutableList.of(
"bar",
"foo",
"kafka.newheader.encoding",
"kafka.newheader.kafkapkc",
"kafka.newts.timestamp",
"kafka.newtopic.topic"
)
)
),
ColumnsFilter.all()
),
newSettableByteEntity(inputEntity),
null
);
try (CloseableIterator<InputRow> iterator = reader.read()) {
while (iterator.hasNext()) {
Throwable t = Assert.assertThrows(ParseException.class, iterator::next);
Assert.assertTrue(
t.getMessage().startsWith("Timestamp[null] is unparseable! Event: {")
);
}
}
}
  @Test
  public void testWithSchemaDiscovery() throws IOException
  {
    // With schema discovery enabled (no declared dimensions), every discovered
    // column — payload fields, flattened fields, key, headers, timestamp, topic —
    // becomes a dimension.
    Headers headers = new RecordHeaders(SAMPLE_HEADERS);
    KafkaRecordEntity inputEntity =
        makeInputEntity(SIMPLE_JSON_KEY_BYTES, SIMPLE_JSON_VALUE_BYTES, headers);

    final InputEntityReader reader = format.createReader(
        new InputRowSchema(
            new TimestampSpec("timestamp", "iso", null),
            DimensionsSpec.builder().useSchemaDiscovery(true).build(),
            ColumnsFilter.all()
        ),
        newSettableByteEntity(inputEntity),
        null
    );

    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
      int numActualIterations = 0;
      while (iterator.hasNext()) {

        final InputRow row = iterator.next();
        // Discovered dimension list, in the exact order the reader produces it.
        Assert.assertEquals(
            Arrays.asList(
                "kafka.newtopic.topic",
                "foo",
                "kafka.newts.timestamp",
                "kafka.newkey.key",
                "root_baz",
                "o",
                "bar",
                "kafka.newheader.kafkapkc",
                "path_omg",
                "jq_omg",
                "jq_omg2",
                "baz",
                "root_baz2",
                "kafka.newheader.encoding",
                "path_omg2"
            ),
            row.getDimensions()
        );

        // Payload verifications
        Assert.assertEquals(DateTimes.of("2021-06-25"), row.getTimestamp());
        Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
        Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
        Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));

        verifyHeader(row);

        // Key verification
        Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));

        // Flattening specs with no matching input yield empty dimensions.
        Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
        Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
        Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());

        numActualIterations++;
      }

      Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
  }
  @Test
  public void testKeyInCsvFormat() throws IOException
  {
    // Reassign the format so the KEY is parsed as CSV: only the first CSV column is
    // used as the key value, any further columns are ignored.
    format = new KafkaInputFormat(
        new KafkaStringHeaderFormat(null),
        // Key Format
        new CsvInputFormat(
            // name of the field doesn't matter, it just has to be something
            Collections.singletonList("foo"),
            null,
            false,
            false,
            0,
            null
        ),
        // Value Format
        new JsonInputFormat(
            new JSONPathSpec(true, ImmutableList.of()),
            null,
            null,
            false,
            false
        ),
        "kafka.newheader.",
        "kafka.newkey.key",
        "kafka.newts.timestamp",
        "kafka.newtopic.topic"
    );

    Headers headers = new RecordHeaders(SAMPLE_HEADERS);
    KafkaRecordEntity inputEntity =
        makeInputEntity(
            // x,y,z are ignored; key will be "sampleKey"
            StringUtils.toUtf8("sampleKey,x,y,z"),
            SIMPLE_JSON_VALUE_BYTES,
            headers
        );

    final InputEntityReader reader = format.createReader(
        new InputRowSchema(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(
                DimensionsSpec.getDefaultSchemas(
                    ImmutableList.of(
                        "bar",
                        "foo",
                        "kafka.newkey.key",
                        "kafka.newheader.encoding",
                        "kafka.newheader.kafkapkc",
                        "kafka.newts.timestamp",
                        "kafka.newtopic.topic"
                    )
                )
            ),
            ColumnsFilter.all()
        ),
        newSettableByteEntity(inputEntity),
        null
    );

    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
      int numActualIterations = 0;
      while (iterator.hasNext()) {

        final InputRow row = iterator.next();
        // Declared dimensions come back exactly as listed in the DimensionsSpec.
        Assert.assertEquals(
            Arrays.asList(
                "bar",
                "foo",
                "kafka.newkey.key",
                "kafka.newheader.encoding",
                "kafka.newheader.kafkapkc",
                "kafka.newts.timestamp",
                "kafka.newtopic.topic"
            ),
            row.getDimensions()
        );

        // Payload verifications
        // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
        // but test reading them anyway since it isn't technically illegal
        Assert.assertEquals(DateTimes.of("2021-06-25"), row.getTimestamp());
        Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
        // The payload's "bar" is JSON null, so the dimension is empty.
        Assert.assertTrue(row.getDimension("bar").isEmpty());

        verifyHeader(row);

        // Key verification
        Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));

        numActualIterations++;
      }

      Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
  }
  @Test
  public void testValueInCsvFormat() throws IOException
  {
    // Reassign the format so the VALUE is parsed as CSV with declared columns
    // foo, bar, timestamp, baz; the key remains JSON.
    format = new KafkaInputFormat(
        new KafkaStringHeaderFormat(null),
        // Key Format
        new JsonInputFormat(
            new JSONPathSpec(true, ImmutableList.of()),
            null,
            null,
            false,
            false
        ),
        // Value Format
        new CsvInputFormat(
            Arrays.asList("foo", "bar", "timestamp", "baz"),
            null,
            false,
            false,
            0,
            null
        ),
        "kafka.newheader.",
        "kafka.newkey.key",
        "kafka.newts.timestamp",
        "kafka.newtopic.topic"
    );

    Headers headers = new RecordHeaders(SAMPLE_HEADERS);
    // CSV value "x,,2021-06-25,4": foo=x, bar empty, timestamp, baz=4.
    KafkaRecordEntity inputEntity =
        makeInputEntity(SIMPLE_JSON_KEY_BYTES, StringUtils.toUtf8("x,,2021-06-25,4"), headers);

    final InputEntityReader reader = format.createReader(
        new InputRowSchema(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(
                DimensionsSpec.getDefaultSchemas(
                    ImmutableList.of(
                        "bar",
                        "foo",
                        "kafka.newheader.encoding",
                        "kafka.newheader.kafkapkc",
                        "kafka.newts.timestamp",
                        "kafka.newtopic.topic"
                    )
                )
            ),
            ColumnsFilter.all()
        ),
        newSettableByteEntity(inputEntity),
        null
    );

    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
      int numActualIterations = 0;
      while (iterator.hasNext()) {

        final InputRow row = iterator.next();
        // Declared dimensions come back exactly as listed in the DimensionsSpec.
        Assert.assertEquals(
            Arrays.asList(
                "bar",
                "foo",
                "kafka.newheader.encoding",
                "kafka.newheader.kafkapkc",
                "kafka.newts.timestamp",
                "kafka.newtopic.topic"
            ),
            row.getDimensions()
        );

        // Payload verifications
        // this isn't super realistic, since most of these columns are not actually defined in the dimensionSpec
        // but test reading them anyway since it isn't technically illegal
        Assert.assertEquals(DateTimes.of("2021-06-25"), row.getTimestamp());
        Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
        Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
        // The CSV "bar" column was empty, so the dimension is empty.
        Assert.assertTrue(row.getDimension("bar").isEmpty());

        verifyHeader(row);

        // Key verification
        Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));

        numActualIterations++;
      }

      Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
  }
@Test
public void testWithPartialDeclarationSchemaDiscovery() throws IOException
{
  // testWithHeaderKeyAndValue + partial-schema + schema discovery
  // Only "bar" and "kafka.newheader.kafkapkc" are declared; the rest of the
  // dimensions below are expected to be discovered from the payload/headers/key.
  Headers headers = new RecordHeaders(SAMPLE_HEADERS);
  KafkaRecordEntity inputEntity =
      makeInputEntity(SIMPLE_JSON_KEY_BYTES, SIMPLE_JSON_VALUE_BYTES, headers);
  final InputEntityReader reader = format.createReader(
      new InputRowSchema(
          new TimestampSpec("timestamp", "iso", null),
          DimensionsSpec.builder().setDimensions(
              DimensionsSpec.getDefaultSchemas(ImmutableList.of("bar", "kafka.newheader.kafkapkc"))
          ).useSchemaDiscovery(true).build(),
          ColumnsFilter.all()
      ),
      newSettableByteEntity(inputEntity),
      null
  );
  final int numExpectedIterations = 1;
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    int numActualIterations = 0;
    while (iterator.hasNext()) {
      final InputRow row = iterator.next();
      // Declared dimensions first (in declaration order), then discovered ones.
      Assert.assertEquals(
          Arrays.asList(
              "bar",
              "kafka.newheader.kafkapkc",
              "kafka.newtopic.topic",
              "foo",
              "kafka.newts.timestamp",
              "kafka.newkey.key",
              "root_baz",
              "o",
              "path_omg",
              "jq_omg",
              "jq_omg2",
              "baz",
              "root_baz2",
              "kafka.newheader.encoding",
              "path_omg2"
          ),
          row.getDimensions()
      );
      // Payload verifications
      Assert.assertEquals(DateTimes.of("2021-06-25"), row.getTimestamp());
      Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
      Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
      Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
      // Discovered nested object is surfaced as a raw map.
      Assert.assertEquals(ImmutableMap.of("mg", 1L), row.getRaw("o"));
      verifyHeader(row);
      // Key verification
      Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));
      // Flattener paths that don't match anything in the payload resolve to empty.
      Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
      Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
      Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
      numActualIterations++;
    }
    Assert.assertEquals(numExpectedIterations, numActualIterations);
  }
}
/**
 * Wraps the given key, payload, and headers in a {@link KafkaRecordEntity} positioned at
 * partition 0, offset 0 of the test topic. Serialized-size fields are unused by these tests
 * and left at zero; no leader epoch is supplied.
 */
private KafkaRecordEntity makeInputEntity(byte[] key, byte[] payload, Headers headers)
{
  final ConsumerRecord<byte[], byte[]> record = new ConsumerRecord<>(
      TOPIC,
      0,                 // partition
      0,                 // offset
      TIMESTAMP_MILLIS,  // record timestamp
      null,              // timestamp type (unused here)
      0,                 // serialized key size
      0,                 // serialized value size
      key,
      payload,
      headers,
      Optional.empty()   // leader epoch
  );
  return new KafkaRecordEntity(record);
}
/**
 * Asserts that the header-derived dimensions of {@code row} match the sample headers: the
 * prefixed kafka header values, the injected record timestamp and topic, and the parsed
 * payload timestamp string.
 */
private void verifyHeader(InputRow row)
{
  Assert.assertEquals(
      "application/json",
      Iterables.getOnlyElement(row.getDimension("kafka.newheader.encoding"))
  );
  Assert.assertEquals(
      "pkc-bar",
      Iterables.getOnlyElement(row.getDimension("kafka.newheader.kafkapkc"))
  );
  // The record timestamp is surfaced as epoch millis rendered to a string.
  final String expectedRecordTs = String.valueOf(DateTimes.of("2021-06-24").getMillis());
  Assert.assertEquals(
      expectedRecordTs, Iterables.getOnlyElement(row.getDimension("kafka.newts.timestamp")));
  Assert.assertEquals(TOPIC, Iterables.getOnlyElement(row.getDimension("kafka.newtopic.topic")));
  Assert.assertEquals("2021-06-25", Iterables.getOnlyElement(row.getDimension("timestamp")));
}
/**
 * Builds a {@link SettableByteEntity} pre-seeded with the given record entity, as expected by
 * readers created from {@code KafkaInputFormat}.
 */
private SettableByteEntity<KafkaRecordEntity> newSettableByteEntity(KafkaRecordEntity kafkaRecordEntity)
{
  final SettableByteEntity<KafkaRecordEntity> entity = new SettableByteEntity<>();
  entity.setEntity(kafkaRecordEntity);
  return entity;
}
}
|
apache/lucene | 36,079 | lucene/core/src/java/org/apache/lucene/index/MergePolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.index;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOConsumer;
import org.apache.lucene.util.IOFunction;
import org.apache.lucene.util.IOSupplier;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.ThreadInterruptedException;
/**
* Expert: a MergePolicy determines the sequence of primitive merge operations.
*
* <p>Whenever the segments in an index have been altered by {@link IndexWriter}, either the
* addition of a newly flushed segment, addition of many segments from addIndexes* calls, or a
* previous merge that may now need to cascade, {@link IndexWriter} invokes {@link #findMerges} to
* give the MergePolicy a chance to pick merges that are now required. This method returns a {@link
* MergeSpecification} instance describing the set of merges that should be done, or null if no
* merges are necessary. When IndexWriter.forceMerge is called, it calls {@link
* #findForcedMerges(SegmentInfos, int, Map, MergeContext)} and the MergePolicy should then return
* the necessary merges.
*
* <p>Note that the policy can return more than one merge at a time. In this case, if the writer is
* using {@link SerialMergeScheduler}, the merges will be run sequentially but if it is using {@link
* ConcurrentMergeScheduler} they will be run concurrently.
*
* <p>The default MergePolicy is {@link TieredMergePolicy}.
*
* @lucene.experimental
*/
public abstract class MergePolicy {
/**
 * Progress and state for an executing merge. This class encapsulates the logic to pause and
 * resume the merge thread or to abort the merge entirely.
 *
 * @lucene.experimental
 */
public static class OneMergeProgress {
  /** Reason for pausing the merge thread. */
  public enum PauseReason {
    /** Stopped (because of throughput rate set to 0, typically). */
    STOPPED,
    /** Temporarily paused because of exceeded throughput rate. */
    PAUSED,
    /** Other reason. */
    OTHER
  }

  private final ReentrantLock pauseLock = new ReentrantLock();
  private final Condition pausing = pauseLock.newCondition();

  /** Pause times (in nanoseconds) for each {@link PauseReason}. */
  private final EnumMap<PauseReason, AtomicLong> pauseTimesNS;

  // Written by abort() (possibly from another thread), read by the merge thread.
  private volatile boolean aborted;

  /**
   * This field is for sanity-check purposes only. Only the same thread that invoked {@link
   * OneMerge#mergeInit()} is permitted to be calling {@link #pauseNanos}. This is always verified
   * at runtime.
   */
  private Thread owner;

  /** Creates a new merge progress info. */
  public OneMergeProgress() {
    // Place all the pause reasons in there immediately so that we can simply update values.
    pauseTimesNS = new EnumMap<>(PauseReason.class);
    for (PauseReason p : PauseReason.values()) {
      pauseTimesNS.put(p, new AtomicLong());
    }
  }

  /** Abort the merge this progress tracks at the next possible moment. */
  public void abort() {
    aborted = true;
    wakeup(); // wakeup any paused merge thread.
  }

  /** Return the aborted state of this merge. */
  public boolean isAborted() {
    return aborted;
  }

  /**
   * Pauses the calling thread for at least <code>pauseNanos</code> nanoseconds unless the merge
   * is aborted or the external condition returns <code>false</code>, in which case control
   * returns immediately.
   *
   * <p>The external condition is required so that other threads can terminate the pausing
   * immediately, before <code>pauseNanos</code> expires. We can't rely on just {@link
   * Condition#awaitNanos(long)} alone because it can return due to spurious wakeups too.
   *
   * @param condition The pause condition that should return false if immediate return from this
   *     method is needed. Other threads can wake up any sleeping thread by calling {@link
   *     #wakeup}, but it'd fall to sleep for the remainder of the requested time if this
   *     condition keeps returning true.
   */
  public void pauseNanos(long pauseNanos, PauseReason reason, BooleanSupplier condition)
      throws InterruptedException {
    long start = System.nanoTime();
    AtomicLong timeUpdate = pauseTimesNS.get(reason);
    pauseLock.lock();
    try {
      // awaitNanos returns the remaining wait time, so spurious wakeups simply
      // re-enter the loop with the leftover budget.
      while (pauseNanos > 0 && !aborted && condition.getAsBoolean()) {
        pauseNanos = pausing.awaitNanos(pauseNanos);
      }
    } finally {
      pauseLock.unlock();
      // Record actual elapsed time (even on early exit or interrupt).
      timeUpdate.addAndGet(System.nanoTime() - start);
    }
  }

  /** Request a wakeup for any threads stalled in {@link #pauseNanos}. */
  public void wakeup() {
    pauseLock.lock();
    try {
      pausing.signalAll();
    } finally {
      pauseLock.unlock();
    }
  }

  /** Returns pause reasons and associated times in nanoseconds. */
  public Map<PauseReason, Long> getPauseTimes() {
    Set<Entry<PauseReason, AtomicLong>> entries = pauseTimesNS.entrySet();
    return entries.stream().collect(Collectors.toMap(Entry::getKey, (e) -> e.getValue().get()));
  }

  // Records which thread runs the merge; asserted to be set at most once.
  final void setMergeThread(Thread owner) {
    assert this.owner == null;
    this.owner = owner;
  }
}
/**
 * OneMerge provides the information necessary to perform an individual primitive merge operation,
 * resulting in a single new segment. The merge spec includes the subset of segments to be merged
 * as well as whether the new segment should use the compound file format.
 *
 * @lucene.experimental
 */
public static class OneMerge {
  // Completed exactly once (with the success flag) by close(); used by the
  // await()/hasFinished()/hasCompletedSuccessfully() accessors below.
  private final CompletableFuture<Boolean> mergeCompleted = new CompletableFuture<>();
  SegmentCommitInfo info; // used by IndexWriter
  boolean registerDone; // used by IndexWriter
  long mergeGen; // used by IndexWriter
  boolean isExternal; // used by IndexWriter
  int maxNumSegments = -1; // used by IndexWriter
  boolean usesPooledReaders; // used by IndexWriter to drop readers while closing

  /** Estimated size in bytes of the merged segment. */
  public volatile long estimatedMergeBytes; // used by IndexWriter

  // Sum of sizeInBytes of all SegmentInfos; set by IW.mergeInit
  volatile long totalMergeBytes;

  private List<MergeReader> mergeReaders; // used by IndexWriter

  /** Segments to be merged. */
  public final List<SegmentCommitInfo> segments;

  /** Control used to pause/stop/resume the merge thread. */
  private final OneMergeProgress mergeProgress;

  volatile long mergeStartNS = -1;

  /** Total number of documents in segments to be merged, not accounting for deletions. */
  final int totalMaxDoc;
  Throwable error;

  /**
   * Sole constructor.
   *
   * @param segments List of {@link SegmentCommitInfo}s to be merged.
   */
  public OneMerge(List<SegmentCommitInfo> segments) {
    if (segments.isEmpty()) {
      throw new RuntimeException("segments must include at least one segment");
    }
    // clone the list, as the in list may be based off original SegmentInfos and may be modified
    this.segments = List.copyOf(segments);
    totalMaxDoc = segments.stream().mapToInt(i -> i.info.maxDoc()).sum();
    mergeProgress = new OneMergeProgress();
    mergeReaders = List.of();
    usesPooledReaders = true;
  }

  /**
   * Create a OneMerge directly from CodecReaders. Used to merge incoming readers in {@link
   * IndexWriter#addIndexes(CodecReader...)}. This OneMerge works directly on readers and has an
   * empty segments list.
   *
   * @param codecReaders Codec readers to merge
   */
  public OneMerge(CodecReader... codecReaders) {
    List<MergeReader> readers = new ArrayList<>(codecReaders.length);
    int totalDocs = 0;
    for (CodecReader r : codecReaders) {
      readers.add(new MergeReader(r, r.getLiveDocs()));
      totalDocs += r.numDocs();
    }
    mergeReaders = List.copyOf(readers);
    segments = List.of();
    totalMaxDoc = totalDocs;
    mergeProgress = new OneMergeProgress();
    usesPooledReaders = false;
  }

  /** Constructor for wrapping. */
  protected OneMerge(OneMerge oneMerge) {
    this.segments = oneMerge.segments;
    this.mergeReaders = oneMerge.mergeReaders;
    this.totalMaxDoc = oneMerge.totalMaxDoc;
    // The wrapper gets its own, independent progress/abort state.
    this.mergeProgress = new OneMergeProgress();
    this.usesPooledReaders = oneMerge.usesPooledReaders;
  }

  /**
   * Called by {@link IndexWriter} after the merge started and from the thread that will be
   * executing the merge.
   */
  public void mergeInit() throws IOException {
    mergeProgress.setMergeThread(Thread.currentThread());
  }

  /**
   * Called by {@link IndexWriter} after the merge is done and all readers have been closed.
   *
   * @param success true iff the merge finished successfully i.e. was committed
   * @param segmentDropped true iff the merged segment was dropped since it was fully deleted
   */
  public void mergeFinished(boolean success, boolean segmentDropped) throws IOException {}

  /** Closes this merge and releases all merge readers */
  final void close(
      boolean success, boolean segmentDropped, IOConsumer<MergeReader> readerConsumer)
      throws IOException {
    // this method is final to ensure we never miss a super call to clean up and finish the merge
    if (mergeCompleted.complete(success) == false) {
      throw new IllegalStateException("merge has already finished");
    }
    try {
      mergeFinished(success, segmentDropped);
    } finally {
      // Release the readers even if mergeFinished throws.
      final List<MergeReader> readers = mergeReaders;
      mergeReaders = List.of();
      IOUtils.applyToAll(readers, readerConsumer);
    }
  }

  /**
   * Wrap a reader prior to merging in order to add/remove fields or documents.
   *
   * <p><b>NOTE:</b> It is illegal to reorder doc IDs here, use {@link
   * #reorder(CodecReader,Directory,Executor)} instead.
   */
  public CodecReader wrapForMerge(CodecReader reader) throws IOException {
    return reader;
  }

  /**
   * Extend this method if you wish to renumber doc IDs. This method will be called when index
   * sorting is disabled on a merged view of the {@link OneMerge}. A {@code null} return value
   * indicates that doc IDs should not be reordered.
   *
   * <p><b>NOTE:</b> Returning a non-null value here disables several optimizations and increases
   * the merging overhead.
   *
   * @param reader The reader to reorder.
   * @param dir The {@link Directory} of the index, which may be used to create temporary files.
   * @param executor An executor that can be used to parallelize the reordering logic. May be
   *     {@code null} if no concurrency is supported.
   * @lucene.experimental
   */
  public Sorter.DocMap reorder(CodecReader reader, Directory dir, Executor executor)
      throws IOException {
    return null;
  }

  /**
   * Expert: Sets the {@link SegmentCommitInfo} of the merged segment. Allows sub-classes to e.g.
   * {@link SegmentInfo#addDiagnostics(Map) add diagnostic} properties.
   */
  public void setMergeInfo(SegmentCommitInfo info) {
    this.info = info;
  }

  /**
   * Returns the {@link SegmentCommitInfo} for the merged segment, or null if it hasn't been set
   * yet.
   */
  public SegmentCommitInfo getMergeInfo() {
    return info;
  }

  /** Record that an exception occurred while executing this merge */
  synchronized void setException(Throwable error) {
    this.error = error;
  }

  /** Retrieve previous exception set by {@link #setException}. */
  synchronized Throwable getException() {
    return error;
  }

  /** Returns a readable description of the current merge state. */
  public String segString() {
    StringBuilder b = new StringBuilder();
    final int numSegments = segments.size();
    for (int i = 0; i < numSegments; i++) {
      if (i > 0) {
        b.append(' ');
      }
      b.append(segments.get(i).toString());
    }
    if (info != null) {
      b.append(" into ").append(info.info.name);
    }
    if (maxNumSegments != -1) {
      b.append(" [maxNumSegments=").append(maxNumSegments).append(']');
    }
    if (isAborted()) {
      b.append(" [ABORTED]");
    }
    return b.toString();
  }

  /**
   * Returns the total size in bytes of this merge. Note that this does not indicate the size of
   * the merged segment, but the input total size. This is only set once the merge is initialized
   * by IndexWriter.
   */
  public long totalBytesSize() {
    return totalMergeBytes;
  }

  /**
   * Returns the total number of documents that are included with this merge. Note that this does
   * not indicate the number of documents after the merge.
   */
  public int totalNumDocs() {
    return totalMaxDoc;
  }

  /** Return {@link MergeInfo} describing this merge. */
  public MergeInfo getStoreMergeInfo() {
    return new MergeInfo(totalMaxDoc, estimatedMergeBytes, isExternal, maxNumSegments);
  }

  /** Returns true if this merge was or should be aborted. */
  public boolean isAborted() {
    return mergeProgress.isAborted();
  }

  /**
   * Marks this merge as aborted. The merge thread should terminate at the soonest possible
   * moment.
   */
  public void setAborted() {
    this.mergeProgress.abort();
  }

  /** Checks if merge has been aborted and throws a merge exception if so. */
  public void checkAborted() throws MergeAbortedException {
    if (isAborted()) {
      throw new MergePolicy.MergeAbortedException("merge is aborted: " + segString());
    }
  }

  /**
   * Returns a {@link OneMergeProgress} instance for this merge, which provides statistics of the
   * merge threads (run time vs. sleep time) if merging is throttled.
   */
  public OneMergeProgress getMergeProgress() {
    return mergeProgress;
  }

  /**
   * Waits for this merge to be completed
   *
   * @return true if the merge finished within the specified timeout
   */
  boolean await(long timeout, TimeUnit timeUnit) {
    try {
      mergeCompleted.get(timeout, timeUnit);
      return true;
    } catch (InterruptedException e) {
      throw new ThreadInterruptedException(e);
    } catch (ExecutionException | TimeoutException _) {
      return false;
    }
  }

  /**
   * Returns true if the merge has finished or false if it's still running or has not been
   * started. This method will not block.
   */
  boolean hasFinished() {
    return mergeCompleted.isDone();
  }

  /**
   * Returns true iff the merge completed successfully or false if the merge succeeded with a
   * failure. This method will not block and return an empty Optional if the merge has not
   * finished yet
   */
  Optional<Boolean> hasCompletedSuccessfully() {
    return Optional.ofNullable(mergeCompleted.getNow(null));
  }

  /** Called just before the merge is applied to IndexWriter's SegmentInfos */
  void onMergeComplete() throws IOException {}

  /** Sets the merge readers for this merge. */
  void initMergeReaders(IOFunction<SegmentCommitInfo, MergeReader> readerFactory)
      throws IOException {
    assert mergeReaders.isEmpty() : "merge readers must be empty";
    assert mergeCompleted.isDone() == false : "merge is already done";
    final ArrayList<MergeReader> readers = new ArrayList<>(segments.size());
    try {
      for (final SegmentCommitInfo info : segments) {
        // Hold onto the "live" reader; we will use this to
        // commit merged deletes
        readers.add(readerFactory.apply(info));
      }
    } finally {
      // ensure we assign this to close them in the case of an exception
      // we do a copy here to ensure that mergeReaders are an immutable list
      this.mergeReaders = List.copyOf(readers);
    }
  }

  /** Returns the merge readers or an empty list if the readers were not initialized yet. */
  List<MergeReader> getMergeReader() {
    return mergeReaders;
  }
}
/**
 * A MergeSpecification instance provides the information necessary to perform multiple merges. It
 * simply contains a list of {@link OneMerge} instances.
 */
public static class MergeSpecification {

  /** The subset of segments to be included in the primitive merge. */
  public final List<OneMerge> merges = new ArrayList<>();

  /** Sole constructor. Use {@link #add(MergePolicy.OneMerge)} to add merges. */
  public MergeSpecification() {}

  /** Adds the provided {@link OneMerge} to this specification. */
  public void add(OneMerge merge) {
    merges.add(merge);
  }

  /**
   * Returns a description of the merges in this specification
   *
   * @deprecated Use {@link #toString()} instead. The {@code Directory} parameter is ignored and
   *     will be removed in a future release.
   */
  @Deprecated
  public String segString(Directory dir) {
    return toString();
  }

  @Override
  public String toString() {
    StringBuilder b = new StringBuilder();
    b.append("MergeSpec:");
    final int count = merges.size();
    for (int i = 0; i < count; i++) {
      // 1-based numbering for readability in logs.
      b.append("\n  ").append(1 + i).append(": ").append(merges.get(i).segString());
    }
    return b.toString();
  }

  // Future that completes when every OneMerge in this spec has completed
  // (exceptionally if any merge failed).
  CompletableFuture<Void> getMergeCompletedFutures() {
    return CompletableFuture.allOf(
        merges.stream().map(m -> m.mergeCompleted).toArray(CompletableFuture<?>[]::new));
  }

  /** Waits, until interrupted, for all merges to complete. */
  boolean await() {
    try {
      CompletableFuture<Void> future = getMergeCompletedFutures();
      future.get();
      return true;
    } catch (InterruptedException e) {
      throw new ThreadInterruptedException(e);
    } catch (ExecutionException | CancellationException _) {
      // A failed or cancelled merge counts as "did not complete".
      return false;
    }
  }

  /** Waits if necessary for at most the given time for all merges. */
  boolean await(long timeout, TimeUnit unit) {
    try {
      CompletableFuture<Void> future = getMergeCompletedFutures();
      future.get(timeout, unit);
      return true;
    } catch (InterruptedException e) {
      throw new ThreadInterruptedException(e);
    } catch (ExecutionException | TimeoutException _) {
      return false;
    }
  }
}
/** Exception thrown if there are any problems while executing a merge. */
public static class MergeException extends RuntimeException {
  /** Create a {@code MergeException}. */
  public MergeException(String message) {
    super(message);
  }

  /** Create a {@code MergeException} wrapping the root cause. */
  public MergeException(Throwable exc) {
    super(exc);
  }
}

/**
 * Thrown when a merge was explicitly aborted because {@link IndexWriter#abortMerges} was called.
 * Normally this exception is privately caught and suppressed by {@link IndexWriter}.
 */
public static class MergeAbortedException extends IOException {
  /** Create a {@link MergeAbortedException}. */
  public MergeAbortedException() {
    super("merge is aborted");
  }

  /** Create a {@link MergeAbortedException} with a specified message. */
  public MergeAbortedException(String message) {
    super(message);
  }
}
/**
 * Default ratio for compound file system usage. Set to <code>1.0</code>, always use compound file
 * system.
 */
protected static final double DEFAULT_NO_CFS_RATIO = 1.0;

/**
 * Default max segment size in order to use compound file system. Set to {@link Long#MAX_VALUE}.
 */
protected static final long DEFAULT_MAX_CFS_SEGMENT_SIZE = Long.MAX_VALUE;

/**
 * If the size of the merge segment exceeds this ratio of the total index size then it will remain
 * in non-compound format
 */
protected double noCFSRatio;

/**
 * If the size of the merged segment exceeds this value then it will not use compound file format.
 */
protected long maxCFSSegmentSize;

/** Creates a new merge policy instance. */
protected MergePolicy() {
  this(DEFAULT_NO_CFS_RATIO, DEFAULT_MAX_CFS_SEGMENT_SIZE);
}

/**
 * Creates a new merge policy instance with default settings for noCFSRatio and maxCFSSegmentSize.
 * This ctor should be used by subclasses using different defaults than the {@link MergePolicy}
 * defaults above.
 */
protected MergePolicy(double defaultNoCFSRatio, long defaultMaxCFSSegmentSize) {
  this.noCFSRatio = defaultNoCFSRatio;
  this.maxCFSSegmentSize = defaultMaxCFSSegmentSize;
}
/**
 * Determine what set of merge operations are now necessary on the index. {@link IndexWriter}
 * calls this whenever there is a change to the segments. This call is always synchronized on the
 * {@link IndexWriter} instance so only one thread at a time will call this method.
 *
 * @param mergeTrigger the event that triggered the merge
 * @param segmentInfos the total set of segments in the index
 * @param mergeContext the IndexWriter to find the merges on
 * @return the merges to perform, or {@code null} if no merge is necessary
 */
public abstract MergeSpecification findMerges(
    MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext)
    throws IOException;
/**
 * Define the set of merge operations to perform on provided codec readers in {@link
 * IndexWriter#addIndexes(CodecReader...)}.
 *
 * <p>The merge operation is required to convert provided readers into segments that can be added
 * to the writer. Custom merge policies may override this to raise addIndexes concurrency: the
 * default creates a single merge over all provided readers (lowest concurrency), while one merge
 * per reader would yield the highest concurrency the configured merge scheduler allows.
 *
 * @param readers CodecReader(s) to merge into the main index
 */
public MergeSpecification findMerges(CodecReader... readers) throws IOException {
  // Default: fold every incoming reader into one single merge operation.
  final MergeSpecification spec = new MergeSpecification();
  spec.add(new OneMerge(readers));
  return spec;
}
/**
 * Determine what set of merge operations is necessary in order to merge to {@code <=} the
 * specified segment count. {@link IndexWriter} calls this when its {@link IndexWriter#forceMerge}
 * method is called. This call is always synchronized on the {@link IndexWriter} instance so only
 * one thread at a time will call this method.
 *
 * @param segmentInfos the total set of segments in the index
 * @param maxSegmentCount requested maximum number of segments in the index
 * @param segmentsToMerge contains the specific SegmentInfo instances that must be merged away.
 *     This may be a subset of all SegmentInfos. If the value is True for a given SegmentInfo,
 *     that means this segment was an original segment present in the to-be-merged index; else, it
 *     was a segment produced by a cascaded merge.
 * @param mergeContext the MergeContext to find the merges on
 */
public abstract MergeSpecification findForcedMerges(
    SegmentInfos segmentInfos,
    int maxSegmentCount,
    Map<SegmentCommitInfo, Boolean> segmentsToMerge,
    MergeContext mergeContext)
    throws IOException;

/**
 * Determine what set of merge operations is necessary in order to expunge all deletes from the
 * index.
 *
 * @param segmentInfos the total set of segments in the index
 * @param mergeContext the MergeContext to find the merges on
 */
public abstract MergeSpecification findForcedDeletesMerges(
    SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException;
/**
 * Identifies merges that we want to execute (synchronously) on commit. By default, this will
 * return {@link #findMerges natural merges} whose segments are all less than the {@link
 * #maxFullFlushMergeSize() max segment size for full flushes}.
 *
 * <p>Any merges returned here will make {@link IndexWriter#commit()}, {@link
 * IndexWriter#prepareCommit()} or {@link IndexWriter#getReader(boolean, boolean)} block until the
 * merges complete or until {@link IndexWriterConfig#getMaxFullFlushMergeWaitMillis()} has
 * elapsed. This may be used to merge small segments that have just been flushed, reducing the
 * number of segments in the point in time snapshot. If a merge does not complete in the allotted
 * time, it will continue to execute, and eventually finish and apply to future point in time
 * snapshot, but will not be reflected in the current one.
 *
 * <p>If a {@link OneMerge} in the returned {@link MergeSpecification} includes a segment already
 * included in a registered merge, then {@link IndexWriter#commit()} or {@link
 * IndexWriter#prepareCommit()} will throw a {@link IllegalStateException}. Use {@link
 * MergeContext#getMergingSegments()} to determine which segments are currently registered to
 * merge.
 *
 * @param mergeTrigger the event that triggered the merge (COMMIT or GET_READER).
 * @param segmentInfos the total set of segments in the index (while preparing the commit)
 * @param mergeContext the MergeContext to find the merges on, which should be used to determine
 *     which segments are already in a registered merge (see {@link
 *     MergeContext#getMergingSegments()}).
 */
public MergeSpecification findFullFlushMerges(
    MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext)
    throws IOException {
  // Start from the natural merge choices, then keep only those whose segments
  // are all below the full-flush size threshold.
  final MergeSpecification naturalMerges = findMerges(mergeTrigger, segmentInfos, mergeContext);
  if (naturalMerges == null) {
    return null;
  }
  MergeSpecification filtered = null;
  candidates:
  for (OneMerge candidate : naturalMerges.merges) {
    for (SegmentCommitInfo sci : candidate.segments) {
      if (size(sci, mergeContext) >= maxFullFlushMergeSize()) {
        // One segment is already large; skip this whole merge.
        continue candidates;
      }
    }
    if (filtered == null) {
      filtered = new MergeSpecification();
    }
    filtered.add(candidate);
  }
  return filtered;
}
/**
 * Returns true if a new segment (regardless of its origin) should use the compound file format.
 * The default implementation returns <code>true</code> iff the size of the given mergedInfo is
 * less or equal to {@link #getMaxCFSSegmentSizeMB()} and the size is less or equal to the
 * TotalIndexSize * {@link #getNoCFSRatio()} otherwise <code>false</code>.
 */
public boolean useCompoundFile(
    SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext)
    throws IOException {
  // A ratio of 0 disables compound files entirely.
  if (getNoCFSRatio() == 0.0) {
    return false;
  }
  // Segments above the absolute size cap never use CFS.
  final long candidateSize = size(mergedInfo, mergeContext);
  if (candidateSize > maxCFSSegmentSize) {
    return false;
  }
  // A ratio >= 1 means "always" (below the cap); skip summing the index size.
  if (getNoCFSRatio() >= 1.0) {
    return true;
  }
  // Otherwise only use CFS when the merged segment is a small enough
  // fraction of the total index.
  long indexSize = 0;
  for (SegmentCommitInfo sci : infos) {
    indexSize += size(sci, mergeContext);
  }
  return candidateSize <= getNoCFSRatio() * indexSize;
}
/**
 * Return the byte size of the provided {@link SegmentCommitInfo}, prorated by percentage of
 * non-deleted documents.
 */
protected long size(SegmentCommitInfo info, MergeContext mergeContext) throws IOException {
  final long byteSize = info.sizeInBytes();
  final int delCount = mergeContext.numDeletesToMerge(info);
  assert assertDelCount(delCount, info);
  final int maxDoc = info.info.maxDoc();
  if (maxDoc <= 0) {
    // No live doc count to prorate against: report the raw size.
    return byteSize;
  }
  final double delRatio = (double) delCount / (double) maxDoc;
  assert delRatio <= 1.0;
  return (long) (byteSize * (1.0 - delRatio));
}
/**
 * Return the maximum size of segments to be included in full-flush merges by the default
 * implementation of {@link #findFullFlushMerges}.
 */
protected long maxFullFlushMergeSize() {
  // 0 disables full-flush merges by default; subclasses override to opt in.
  return 0L;
}

/** Asserts that the delCount for this SegmentCommitInfo is valid */
protected final boolean assertDelCount(int delCount, SegmentCommitInfo info) {
  assert delCount >= 0 : "delCount must be positive: " + delCount;
  assert delCount <= info.info.maxDoc()
      : "delCount: " + delCount + " must be leq than maxDoc: " + info.info.maxDoc();
  // Always returns true so it can be used inside an assert statement.
  return true;
}

/**
 * Returns true if this single info is already fully merged (has no pending deletes, is in the
 * same dir as the writer, and matches the current compound file setting
 */
protected final boolean isMerged(
    SegmentInfos infos, SegmentCommitInfo info, MergeContext mergeContext) throws IOException {
  assert mergeContext != null;
  int delCount = mergeContext.numDeletesToMerge(info);
  assert assertDelCount(delCount, info);
  // Fully merged iff nothing is deleted and the CFS setting already matches policy.
  return delCount == 0
      && useCompoundFile(infos, info, mergeContext) == info.info.getUseCompoundFile();
}
/**
* Returns current {@code noCFSRatio}.
*
* @see #setNoCFSRatio
*/
public double getNoCFSRatio() {
return noCFSRatio;
}
/**
* If a merged segment will be more than this percentage of the total size of the index, leave the
* segment as non-compound file even if compound file is enabled. Set to 1.0 to always use CFS
* regardless of merge size.
*/
public void setNoCFSRatio(double noCFSRatio) {
if (noCFSRatio < 0.0 || noCFSRatio > 1.0) {
throw new IllegalArgumentException(
"noCFSRatio must be 0.0 to 1.0 inclusive; got " + noCFSRatio);
}
this.noCFSRatio = noCFSRatio;
}
/** Returns the largest size allowed for a compound file segment */
public double getMaxCFSSegmentSizeMB() {
return maxCFSSegmentSize / 1024. / 1024.;
}
/**
* If a merged segment will be more than this value, leave the segment as non-compound file even
* if compound file is enabled. Set this to Double.POSITIVE_INFINITY (default) and noCFSRatio to
* 1.0 to always use CFS regardless of merge size.
*/
public void setMaxCFSSegmentSizeMB(double v) {
if (v < 0.0) {
throw new IllegalArgumentException("maxCFSSegmentSizeMB must be >=0 (got " + v + ")");
}
v *= 1024 * 1024;
this.maxCFSSegmentSize = v > Long.MAX_VALUE ? Long.MAX_VALUE : (long) v;
}
/**
* Returns true if the segment represented by the given CodecReader should be kept even if it's
* fully deleted. This is useful for testing of for instance if the merge policy implements
* retention policies for soft deletes.
*/
public boolean keepFullyDeletedSegment(IOSupplier<CodecReader> readerIOSupplier)
throws IOException {
return false;
}
/**
* Returns the number of deletes that a merge would claim on the given segment. This method will
* by default return the sum of the del count on disk and the pending delete count. Yet,
* subclasses that wrap merge readers might modify this to reflect deletes that are carried over
* to the target segment in the case of soft deletes.
*
* <p>Soft deletes all deletes to survive across merges in order to control when the soft-deleted
* data is claimed.
*
* @see IndexWriter#softUpdateDocument(Term, Iterable, Field...)
* @see IndexWriterConfig#setSoftDeletesField(String)
* @param info the segment info that identifies the segment
* @param delCount the number deleted documents for this segment
* @param readerSupplier a supplier that allows to obtain a {@link CodecReader} for this segment
*/
public int numDeletesToMerge(
SegmentCommitInfo info, int delCount, IOSupplier<CodecReader> readerSupplier)
throws IOException {
return delCount;
}
/** Builds a String representation of the given SegmentCommitInfo instances */
protected final String segString(MergeContext mergeContext, Iterable<SegmentCommitInfo> infos) {
return StreamSupport.stream(infos.spliterator(), false)
.map(info -> info.toString(mergeContext.numDeletedDocs(info) - info.getDelCount()))
.collect(Collectors.joining(" "));
}
/** Print a debug message to {@link MergeContext}'s {@code infoStream}. */
protected final void message(String message, MergeContext mergeContext) {
if (verbose(mergeContext)) {
mergeContext.getInfoStream().message("MP", message);
}
}
/**
* Returns <code>true</code> if the info-stream is in verbose mode
*
* @see #message(String, MergeContext)
*/
protected final boolean verbose(MergeContext mergeContext) {
return mergeContext.getInfoStream().isEnabled("MP");
}
  /**
   * This interface represents the current context of the merge selection process. It allows
   * accessing real-time information like the currently merging segments or how many deletes a
   * segment would claim back if merged. This context might be stateful and change during the
   * execution of a merge policy's selection processes.
   *
   * @lucene.experimental
   */
  public interface MergeContext {
    /**
     * Returns the number of deletes a merge would claim back if the given segment is merged.
     *
     * @see MergePolicy#numDeletesToMerge(SegmentCommitInfo, int, org.apache.lucene.util.IOSupplier)
     * @param info the segment to get the number of deletes for
     */
    int numDeletesToMerge(SegmentCommitInfo info) throws IOException;

    /** Returns the number of deleted documents already recorded in the given segment. */
    int numDeletedDocs(SegmentCommitInfo info);

    /** Returns the info stream that can be used to log messages. */
    InfoStream getInfoStream();

    /** Returns an unmodifiable set of segments that are currently merging. */
    Set<SegmentCommitInfo> getMergingSegments();
  }
static final class MergeReader {
final CodecReader codecReader;
final SegmentReader reader;
final Bits hardLiveDocs;
MergeReader(SegmentReader reader, Bits hardLiveDocs) {
this.codecReader = reader;
this.reader = reader;
this.hardLiveDocs = hardLiveDocs;
}
MergeReader(CodecReader reader, Bits hardLiveDocs) {
if (SegmentReader.class.isAssignableFrom(reader.getClass())) {
this.reader = (SegmentReader) reader;
} else {
this.reader = null;
}
this.codecReader = reader;
this.hardLiveDocs = hardLiveDocs;
}
}
}
|
apache/hadoop | 35,874 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestNMClientAsync.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.client.api.async.impl;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicIntegerArray;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.service.ServiceOperations;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.client.api.NMClient;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
public class TestNMClientAsync {
private final RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(null);
private NMClientAsyncImpl asyncClient;
private NodeId nodeId;
private Token containerToken;
enum OpsToTest {
START, QUERY, STOP, INCR, REINIT, RESTART, ROLLBACK, COMMIT
}
final static class TestData {
AtomicInteger success = new AtomicInteger(0);
AtomicInteger failure = new AtomicInteger(0);
final AtomicIntegerArray successArray;
final AtomicIntegerArray failureArray;
private TestData(int expectedSuccess, int expectedFailure) {
this.successArray = new AtomicIntegerArray(expectedSuccess);
this.failureArray = new AtomicIntegerArray(expectedFailure);
}
}
  @AfterEach
  public void teardown() {
    // Stop the async client created by the test, if any.
    ServiceOperations.stop(asyncClient);
  }
  /**
   * End-to-end exercise of NMClientAsyncImpl. Phase 1: start {@code expectedSuccess} containers
   * against a fully succeeding NMClient and let the callback chain run through
   * START/QUERY/INCR/REINIT/RESTART/ROLLBACK/COMMIT/STOP. Phase 2: swap in a client whose
   * start/query/stop throw, driving the START and QUERY error callbacks. Phase 3: swap in a
   * client whose update/reinit/restart/rollback/commit/stop throw, driving the remaining error
   * callbacks. Finally asserts no unexpected callbacks occurred and the client shuts down
   * cleanly.
   */
  @Test
  @Timeout(value = 10)
  public void testNMClientAsync() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(YarnConfiguration.NM_CLIENT_ASYNC_THREAD_POOL_MAX_SIZE, 10);
    // Threads to run are more than the max size of the thread pool
    int expectedSuccess = 40;
    int expectedFailure = 40;
    asyncClient = new MockNMClientAsync1(expectedSuccess, expectedFailure);
    asyncClient.init(conf);
    assertEquals(10, asyncClient.maxThreadPoolSize,
        "The max thread pool size is not correctly set");
    asyncClient.start();
    for (int i = 0; i < expectedSuccess + expectedFailure; ++i) {
      if (i == expectedSuccess) {
        // Wait for the success phase to fully drain before switching to the
        // failing mock client (mode 1: start/query/stop throw).
        while (!((TestCallbackHandler1) asyncClient.getCallbackHandler())
            .isAllSuccessCallsExecuted()) {
          Thread.sleep(10);
        }
        asyncClient.setClient(mockNMClient(1));
      }
      Container container = mockContainer(i);
      ContainerLaunchContext clc =
          recordFactory.newRecordInstance(ContainerLaunchContext.class);
      asyncClient.startContainerAsync(container, clc);
    }
    while (!((TestCallbackHandler1) asyncClient.getCallbackHandler())
        .isStartAndQueryFailureCallsExecuted()) {
      Thread.sleep(10);
    }
    // Phase 3: client mode 2 fails update/reinit/restart/rollback/commit/stop;
    // flipping path makes onContainerStarted fan out to those operations.
    asyncClient.setClient(mockNMClient(2));
    ((TestCallbackHandler1) asyncClient.getCallbackHandler()).path = false;
    for (int i = 0; i < expectedFailure; ++i) {
      Container container = mockContainer(
          expectedSuccess + expectedFailure + i);
      ContainerLaunchContext clc =
          recordFactory.newRecordInstance(ContainerLaunchContext.class);
      asyncClient.startContainerAsync(container, clc);
    }
    while (!((TestCallbackHandler1) asyncClient.getCallbackHandler())
        .isIncreaseResourceFailureCallsExecuted()) {
      Thread.sleep(10);
    }
    while (!((TestCallbackHandler1) asyncClient.getCallbackHandler())
        .isStopFailureCallsExecuted()) {
      Thread.sleep(10);
    }
    // Dump any unexpected-callback diagnostics before asserting emptiness.
    for (String errorMsg :
        ((TestCallbackHandler1) asyncClient.getCallbackHandler())
            .errorMsgs) {
      System.out.println(errorMsg);
    }
    assertEquals(0,
        ((TestCallbackHandler1) asyncClient.getCallbackHandler())
            .errorMsgs.size(), "Error occurs in CallbackHandler");
    for (String errorMsg : ((MockNMClientAsync1) asyncClient).errorMsgs) {
      System.out.println(errorMsg);
    }
    assertEquals(0, ((MockNMClientAsync1) asyncClient).errorMsgs.size(),
        "Error occurs in ContainerEventProcessor");
    // When the callback functions are all executed, the event processor threads
    // may still not terminate and the containers may still not removed.
    while (asyncClient.containers.size() > 0) {
      Thread.sleep(10);
    }
    asyncClient.stop();
    assertFalse(asyncClient.eventDispatcherThread.isAlive(),
        "The thread of Container Management Event Dispatcher is still alive");
    assertTrue(asyncClient.threadPool.isShutdown(),
        "The thread pool is not shut down");
  }
  /**
   * NMClientAsyncImpl whose event processors record — rather than propagate — any
   * RuntimeException that escapes the callback machinery, so the test can assert that
   * user-thrown exceptions never kill the processor threads.
   */
  private class MockNMClientAsync1 extends NMClientAsyncImpl {
    // Unexpected throwables seen by event processors; the test asserts empty.
    private Set<String> errorMsgs =
        Collections.synchronizedSet(new HashSet<String>());

    protected MockNMClientAsync1(int expectedSuccess, int expectedFailure)
        throws YarnException, IOException {
      super(MockNMClientAsync1.class.getName(), mockNMClient(0),
          new TestCallbackHandler1(expectedSuccess, expectedFailure));
    }

    /** Event processor that traps RuntimeExceptions thrown during dispatch. */
    private class MockContainerEventProcessor extends ContainerEventProcessor {
      public MockContainerEventProcessor(ContainerEvent event) {
        super(event);
      }

      @Override
      public void run() {
        try {
          super.run();
        } catch (RuntimeException e) {
          // If the unexpected throwable comes from error callback functions, it
          // will break ContainerEventProcessor.run(). Therefore, monitor
          // the exception here
          errorMsgs.add("Unexpected throwable from callback functions should" +
              " be ignored by Container " + event.getContainerId());
        }
      }
    }

    @Override
    protected ContainerEventProcessor getContainerEventProcessor(
        ContainerEvent event) {
      return new MockContainerEventProcessor(event);
    }
  }
private class TestCallbackHandler1
extends NMClientAsync.AbstractCallbackHandler {
private boolean path = true;
private int expectedSuccess;
private int expectedFailure;
private final Map<OpsToTest, TestData> testMap = new HashMap<>();
private Set<String> errorMsgs =
Collections.synchronizedSet(new HashSet<String>());
public TestCallbackHandler1(int expectedSuccess, int expectedFailure) {
this.expectedSuccess = expectedSuccess;
this.expectedFailure = expectedFailure;
for (OpsToTest op : OpsToTest.values()) {
testMap.put(op, new TestData(expectedSuccess, expectedFailure));
}
}
@SuppressWarnings("deprecation")
@Override
public void onContainerStarted(ContainerId containerId,
Map<String, ByteBuffer> allServiceResponse) {
if (path) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerStarted");
return;
}
TestData td = testMap.get(OpsToTest.START);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
asyncClient.getContainerStatusAsync(containerId, nodeId);
} else {
// move on to the following failure tests
// make sure we pass in the container with the same
// containerId
Container container = Container.newInstance(
containerId, nodeId, null, null, null, containerToken);
int t = containerId.getId() % 5;
switch (t) {
case 0:
asyncClient.updateContainerResourceAsync(container);
break;
case 1:
asyncClient.reInitializeContainerAsync(containerId,
recordFactory.newRecordInstance(ContainerLaunchContext.class),
true);
break;
case 2:
asyncClient.restartContainerAsync(containerId);
break;
case 3:
asyncClient.rollbackLastReInitializationAsync(containerId);
break;
case 4:
asyncClient.commitLastReInitializationAsync(containerId);
break;
default:
break;
}
}
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerStatusReceived(ContainerId containerId,
ContainerStatus containerStatus) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerStatusReceived");
return;
}
TestData td = testMap.get(OpsToTest.QUERY);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
// make sure we pass in the container with the same
// containerId
Container container = Container.newInstance(
containerId, nodeId, null, null, null, containerToken);
asyncClient.updateContainerResourceAsync(container);
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerResourceIncreased(
ContainerId containerId, Resource resource) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerResourceIncreased");
return;
}
TestData td = testMap.get(OpsToTest.INCR);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
asyncClient.reInitializeContainerAsync(containerId,
Records.newRecord(ContainerLaunchContext.class), true);
// throw a fake user exception, and shouldn't crash the test
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerResourceUpdated(ContainerId containerId,
Resource resource) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerResourceUpdated");
return;
}
TestData td = testMap.get(OpsToTest.INCR);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
asyncClient.reInitializeContainerAsync(containerId,
Records.newRecord(ContainerLaunchContext.class), true);
// throw a fake user exception, and shouldn't crash the test
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerReInitialize(ContainerId containerId) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerReInitialize");
return;
}
TestData td = testMap.get(OpsToTest.REINIT);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
asyncClient.restartContainerAsync(containerId);
// throw a fake user exception, and shouldn't crash the test
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerRestart(ContainerId containerId) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerReInitialize");
return;
}
TestData td = testMap.get(OpsToTest.RESTART);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
asyncClient.rollbackLastReInitializationAsync(containerId);
// throw a fake user exception, and shouldn't crash the test
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onRollbackLastReInitialization(ContainerId containerId) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerReInitialize");
return;
}
TestData td = testMap.get(OpsToTest.ROLLBACK);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
asyncClient.commitLastReInitializationAsync(containerId);
// throw a fake user exception, and shouldn't crash the test
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onCommitLastReInitialization(ContainerId containerId) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerReInitialize");
return;
}
TestData td = testMap.get(OpsToTest.COMMIT);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// move on to the following success tests
asyncClient.stopContainerAsync(containerId, nodeId);
// throw a fake user exception, and shouldn't crash the test
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerStopped(ContainerId containerId) {
if (containerId.getId() >= expectedSuccess) {
errorMsgs.add("Container " + containerId +
" should throw the exception onContainerStopped");
return;
}
TestData td = testMap.get(OpsToTest.STOP);
td.success.addAndGet(1);
td.successArray.set(containerId.getId(), 1);
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onStartContainerError(ContainerId containerId, Throwable t) {
// If the unexpected throwable comes from success callback functions, it
// will be handled by the error callback functions. Therefore, monitor
// the exception here
if (t instanceof RuntimeException) {
errorMsgs.add("Unexpected throwable from callback functions should be" +
" ignored by Container " + containerId);
}
if (containerId.getId() < expectedSuccess) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onStartContainerError");
return;
}
TestData td = testMap.get(OpsToTest.START);
td.failure.addAndGet(1);
td.failureArray.set(containerId.getId() - expectedSuccess, 1);
// move on to the following failure tests
asyncClient.getContainerStatusAsync(containerId, nodeId);
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onIncreaseContainerResourceError(
ContainerId containerId, Throwable t) {
if (containerId.getId() < expectedSuccess + expectedFailure) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onIncreaseContainerResourceError");
return;
}
TestData td = testMap.get(OpsToTest.INCR);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// increase container resource error should NOT change the
// the container status to FAILED
// move on to the following failure tests
asyncClient.stopContainerAsync(containerId, nodeId);
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onUpdateContainerResourceError(ContainerId containerId,
Throwable t) {
if (containerId.getId() < expectedSuccess + expectedFailure) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onUpdatedContainerResourceError");
return;
}
TestData td = testMap.get(OpsToTest.INCR);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// increase container resource error should NOT change the
// the container status to FAILED
// move on to the following failure tests
asyncClient.stopContainerAsync(containerId, nodeId);
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerReInitializeError(ContainerId containerId,
Throwable t) {
if (containerId.getId() < expectedSuccess + expectedFailure) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onContainerReInitializeError");
return;
}
TestData td = testMap.get(OpsToTest.REINIT);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// increment the stop counters here.. since the container will fail
td = testMap.get(OpsToTest.STOP);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// reInit container changes the container status to FAILED
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onContainerRestartError(ContainerId containerId, Throwable t) {
if (containerId.getId() < expectedSuccess + expectedFailure) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onContainerRestartError");
return;
}
TestData td = testMap.get(OpsToTest.RESTART);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// increment the stop counters here.. since the container will fail
td = testMap.get(OpsToTest.STOP);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// restart container changes the container status to FAILED
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onRollbackLastReInitializationError(ContainerId containerId,
Throwable t) {
if (containerId.getId() < expectedSuccess + expectedFailure) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception" +
" onRollbackLastReInitializationError");
return;
}
TestData td = testMap.get(OpsToTest.ROLLBACK);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// increment the stop counters here.. since the container will fail
td = testMap.get(OpsToTest.STOP);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// rollback container changes the container status to FAILED
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onCommitLastReInitializationError(ContainerId containerId,
Throwable t) {
if (containerId.getId() < expectedSuccess + expectedFailure) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onCommitLastReInitializationError");
return;
}
TestData td = testMap.get(OpsToTest.COMMIT);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// increment the stop counters here.. since the container will fail
td = testMap.get(OpsToTest.STOP);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// commit container changes the container status to FAILED
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onStopContainerError(ContainerId containerId, Throwable t) {
if (t instanceof RuntimeException) {
errorMsgs.add("Unexpected throwable from callback functions should be" +
" ignored by Container " + containerId);
}
if (containerId.getId() < expectedSuccess + expectedFailure) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onStopContainerError");
return;
}
TestData td = testMap.get(OpsToTest.STOP);
td.failure.addAndGet(1);
td.failureArray.set(
containerId.getId() - expectedSuccess - expectedFailure, 1);
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
@SuppressWarnings("deprecation")
@Override
public void onGetContainerStatusError(ContainerId containerId,
Throwable t) {
if (t instanceof RuntimeException) {
errorMsgs.add("Unexpected throwable from callback functions should be"
+ " ignored by Container " + containerId);
}
if (containerId.getId() < expectedSuccess) {
errorMsgs.add("Container " + containerId +
" shouldn't throw the exception onGetContainerStatusError");
return;
}
TestData td = testMap.get(OpsToTest.QUERY);
td.failure.addAndGet(1);
td.failureArray.set(containerId.getId() - expectedSuccess, 1);
// Shouldn't crash the test thread
throw new RuntimeException("Ignorable Exception");
}
public boolean isAllSuccessCallsExecuted() {
boolean isAllSuccessCallsExecuted =
testMap.get(OpsToTest.START).success.get() == expectedSuccess &&
testMap.get(OpsToTest.QUERY).success.get() == expectedSuccess &&
testMap.get(OpsToTest.INCR).success.get() == expectedSuccess &&
testMap.get(OpsToTest.REINIT).success.get() == expectedSuccess &&
testMap.get(OpsToTest.RESTART).success.get() == expectedSuccess &&
testMap.get(OpsToTest.ROLLBACK).success.get() ==
expectedSuccess &&
testMap.get(OpsToTest.COMMIT).success.get() == expectedSuccess &&
testMap.get(OpsToTest.STOP).success.get() == expectedSuccess;
if (isAllSuccessCallsExecuted) {
assertAtomicIntegerArray(testMap.get(OpsToTest.START).successArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.QUERY).successArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.INCR).successArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.REINIT).successArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.RESTART).successArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.ROLLBACK).successArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.COMMIT).successArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.STOP).successArray);
}
return isAllSuccessCallsExecuted;
}
public boolean isStartAndQueryFailureCallsExecuted() {
boolean isStartAndQueryFailureCallsExecuted =
testMap.get(OpsToTest.START).failure.get() == expectedFailure &&
testMap.get(OpsToTest.QUERY).failure.get() == expectedFailure;
if (isStartAndQueryFailureCallsExecuted) {
assertAtomicIntegerArray(testMap.get(OpsToTest.START).failureArray);
assertAtomicIntegerArray(testMap.get(OpsToTest.QUERY).failureArray);
}
return isStartAndQueryFailureCallsExecuted;
}
public boolean isIncreaseResourceFailureCallsExecuted() {
boolean isIncreaseResourceFailureCallsExecuted =
testMap.get(OpsToTest.INCR).failure.get()
+ testMap.get(OpsToTest.REINIT).failure.get()
+ testMap.get(OpsToTest.RESTART).failure.get()
+ testMap.get(OpsToTest.ROLLBACK).failure.get()
+ testMap.get(OpsToTest.COMMIT).failure.get()
== expectedFailure;
if (isIncreaseResourceFailureCallsExecuted) {
AtomicIntegerArray testArray =
new AtomicIntegerArray(
testMap.get(OpsToTest.INCR).failureArray.length());
for (int i = 0; i < testArray.length(); i++) {
for (OpsToTest op : EnumSet.of(OpsToTest.REINIT, OpsToTest.RESTART,
OpsToTest.ROLLBACK, OpsToTest.COMMIT, OpsToTest.INCR)) {
testArray.addAndGet(i, testMap.get(op).failureArray.get(i));
}
}
assertAtomicIntegerArray(testArray);
}
return isIncreaseResourceFailureCallsExecuted;
}
public boolean isStopFailureCallsExecuted() {
boolean isStopFailureCallsExecuted =
testMap.get(OpsToTest.STOP).failure.get() == expectedFailure;
if (isStopFailureCallsExecuted) {
assertAtomicIntegerArray(testMap.get(OpsToTest.STOP).failureArray);
}
return isStopFailureCallsExecuted;
}
private void assertAtomicIntegerArray(AtomicIntegerArray array) {
for (int i = 0; i < array.length(); ++i) {
assertEquals(1, array.get(i));
}
}
}
  /**
   * Builds a mock NMClient in one of three modes: 0 — every operation succeeds; 1 — start,
   * query, and stop throw remote exceptions (other ops untouched); 2 — start and query succeed
   * but update/reinit/restart/rollback/commit/stop throw remote exceptions.
   */
  private NMClient mockNMClient(int mode)
      throws YarnException, IOException {
    NMClient client = mock(NMClient.class);
    switch (mode) {
      case 0:
        // Fully successful client: every call returns a benign value.
        when(client.startContainer(any(Container.class),
            any(ContainerLaunchContext.class))).thenReturn(
                Collections.<String, ByteBuffer>emptyMap());
        when(client.getContainerStatus(any(ContainerId.class),
            any(NodeId.class))).thenReturn(
                recordFactory.newRecordInstance(ContainerStatus.class));
        doNothing().when(client).updateContainerResource(
            any(Container.class));
        doNothing().when(client).reInitializeContainer(
            any(ContainerId.class), any(ContainerLaunchContext.class),
            anyBoolean());
        doNothing().when(client).restartContainer(any(ContainerId.class));
        doNothing().when(client).rollbackLastReInitialization(
            any(ContainerId.class));
        doNothing().when(client).commitLastReInitialization(
            any(ContainerId.class));
        doNothing().when(client).stopContainer(any(ContainerId.class),
            any(NodeId.class));
        break;
      case 1:
        // start/query/stop fail; used to drive the START and QUERY error path.
        doThrow(RPCUtil.getRemoteException("Start Exception")).when(client)
            .startContainer(any(Container.class),
                any(ContainerLaunchContext.class));
        doThrow(RPCUtil.getRemoteException("Query Exception")).when(client)
            .getContainerStatus(any(ContainerId.class), any(NodeId.class));
        doThrow(RPCUtil.getRemoteException("Stop Exception")).when(client)
            .stopContainer(any(ContainerId.class), any(NodeId.class));
        break;
      case 2:
        // start/query succeed, but every subsequent operation fails; used to
        // drive the update/reinit/restart/rollback/commit/stop error paths.
        when(client.startContainer(any(Container.class),
            any(ContainerLaunchContext.class))).thenReturn(
                Collections.<String, ByteBuffer>emptyMap());
        when(client.getContainerStatus(any(ContainerId.class),
            any(NodeId.class))).thenReturn(
                recordFactory.newRecordInstance(ContainerStatus.class));
        doThrow(RPCUtil.getRemoteException("Increase Resource Exception"))
            .when(client).updateContainerResource(any(Container.class));
        doThrow(RPCUtil.getRemoteException("ReInitialize Exception"))
            .when(client).reInitializeContainer(
                any(ContainerId.class), any(ContainerLaunchContext.class),
                anyBoolean());
        doThrow(RPCUtil.getRemoteException("Restart Exception"))
            .when(client).restartContainer(any(ContainerId.class));
        doThrow(RPCUtil.getRemoteException("Rollback upgrade Exception"))
            .when(client).rollbackLastReInitialization(
                any(ContainerId.class));
        doThrow(RPCUtil.getRemoteException("Commit upgrade Exception"))
            .when(client).commitLastReInitialization(
                any(ContainerId.class));
        doThrow(RPCUtil.getRemoteException("Stop Exception")).when(client)
            .stopContainer(any(ContainerId.class), any(NodeId.class));
    }
    // All modes resolve any started container to a local node.
    when(client.getNodeIdOfStartedContainer(any(ContainerId.class)))
        .thenReturn(NodeId.newInstance("localhost", 0));
    return client;
  }
  /**
   * Verifies handling of an out-of-order request sequence: a
   * stopContainerAsync issued while the matching START_CONTAINER event is
   * still being processed must be reported through onStartContainerError
   * with STOP_BEFORE_START_ERROR_MSG (checked by TestCallbackHandler2),
   * not treated as a normal stop.
   */
  @Test
  @Timeout(value = 10)
  public void testOutOfOrder() throws Exception {
    // barrierA: released once the START_CONTAINER event has begun processing.
    // barrierB: stalls the start processor until the STOP_CONTAINER event
    //           has been processed (see MockContainerEventProcessor).
    // barrierC: released by the callback handler when the expected
    //           out-of-order error has been observed.
    CyclicBarrier barrierA = new CyclicBarrier(2);
    CyclicBarrier barrierB = new CyclicBarrier(2);
    CyclicBarrier barrierC = new CyclicBarrier(2);
    asyncClient = new MockNMClientAsync2(barrierA, barrierB, barrierC);
    asyncClient.init(new Configuration());
    asyncClient.start();
    final Container container = mockContainer(1);
    final ContainerLaunchContext clc =
        recordFactory.newRecordInstance(ContainerLaunchContext.class);
    // start container from another thread
    Thread t = new Thread() {
      @Override
      public void run() {
        asyncClient.startContainerAsync(container, clc);
      }
    };
    t.start();
    barrierA.await();  // wait until the start event is in flight
    asyncClient.stopContainerAsync(container.getId(), container.getNodeId());
    barrierC.await();  // wait for the callback handler to see the error
    // exceptionOccurred is set only if the error message was NOT the
    // expected out-of-order message.
    assertFalse(((TestCallbackHandler2) asyncClient.getCallbackHandler())
        .exceptionOccurred.get(), "Starting and stopping should be out of order");
  }
  /**
   * NMClientAsyncImpl subclass whose container-event processor can be paused
   * with barriers, so a STOP_CONTAINER event is deliberately processed before
   * the START_CONTAINER event for the same container finishes.
   */
  private class MockNMClientAsync2 extends NMClientAsyncImpl {
    // barrierC is handed to the callback handler; only A and B are kept here.
    private CyclicBarrier barrierA;
    private CyclicBarrier barrierB;
    protected MockNMClientAsync2(CyclicBarrier barrierA, CyclicBarrier barrierB,
        CyclicBarrier barrierC) throws YarnException, IOException {
      // Case 0 of mockNMClient: all NMClient operations succeed.
      super(MockNMClientAsync2.class.getName(), mockNMClient(0),
          new TestCallbackHandler2(barrierC));
      this.barrierA = barrierA;
      this.barrierB = barrierB;
    }
    /**
     * Event processor that stalls START_CONTAINER events: it first signals
     * the test thread via barrierA, then blocks on barrierB until the
     * STOP_CONTAINER processor has run and arrived at barrierB.
     */
    private class MockContainerEventProcessor extends ContainerEventProcessor {
      public MockContainerEventProcessor(ContainerEvent event) {
        super(event);
      }
      @Override
      public void run() {
        try {
          if (event.getType() == ContainerEventType.START_CONTAINER) {
            barrierA.await();  // let the test thread issue the stop request
            barrierB.await();  // wait for the stop event to be processed
          }
          super.run();
          if (event.getType() == ContainerEventType.STOP_CONTAINER) {
            barrierB.await();  // release the stalled start processor
          }
        } catch (InterruptedException e) {
          e.printStackTrace();
        } catch (BrokenBarrierException e) {
          e.printStackTrace();
        }
      }
    }
    @Override
    protected ContainerEventProcessor getContainerEventProcessor(
        ContainerEvent event) {
      // Substitute the barrier-aware processor for the default one.
      return new MockContainerEventProcessor(event);
    }
  }
private class TestCallbackHandler2
extends NMClientAsync.AbstractCallbackHandler {
private CyclicBarrier barrierC;
private AtomicBoolean exceptionOccurred = new AtomicBoolean(false);
public TestCallbackHandler2(CyclicBarrier barrierC) {
this.barrierC = barrierC;
}
@Override
public void onContainerStarted(ContainerId containerId,
Map<String, ByteBuffer> allServiceResponse) {
}
@Override
public void onContainerStatusReceived(ContainerId containerId,
ContainerStatus containerStatus) {
}
@Deprecated
@Override
public void onContainerResourceIncreased(
ContainerId containerId, Resource resource) {}
@Override
public void onContainerResourceUpdated(ContainerId containerId,
Resource resource) {
}
@Override
public void onContainerStopped(ContainerId containerId) {
}
@Override
public void onStartContainerError(ContainerId containerId, Throwable t) {
if (!t.getMessage().equals(NMClientAsyncImpl.StatefulContainer
.OutOfOrderTransition.STOP_BEFORE_START_ERROR_MSG)) {
exceptionOccurred.set(true);
return;
}
try {
barrierC.await();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (BrokenBarrierException e) {
e.printStackTrace();
}
}
@Override
public void onGetContainerStatusError(ContainerId containerId,
Throwable t) {
}
@Deprecated
@Override
public void onIncreaseContainerResourceError(
ContainerId containerId, Throwable t) {}
@Override
public void onUpdateContainerResourceError(ContainerId containerId,
Throwable t) {
}
@Override
public void onStopContainerError(ContainerId containerId, Throwable t) {
}
}
private Container mockContainer(int i) {
ApplicationId appId =
ApplicationId.newInstance(System.currentTimeMillis(), 1);
ApplicationAttemptId attemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId containerId = ContainerId.newContainerId(attemptId, i);
nodeId = NodeId.newInstance("localhost", 0);
// Create an empty record
containerToken = recordFactory.newRecordInstance(Token.class);
return Container.newInstance(containerId, nodeId, null, null, null,
containerToken);
}
}
|
googleapis/google-api-java-client-services | 35,945 | clients/google-api-services-firebasestorage/v1beta/1.31.0/com/google/api/services/firebasestorage/v1beta/Firebasestorage.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.firebasestorage.v1beta;
/**
* Service definition for Firebasestorage (v1beta).
*
* <p>
* The Cloud Storage for Firebase API enables programmatic management of Cloud Storage buckets for use in Firebase projects
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://firebase.google.com/docs/storage" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link FirebasestorageRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class Firebasestorage extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
// Note: Leave this static initializer at the top of the file.
static {
com.google.api.client.util.Preconditions.checkState(
com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
(com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
(com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1)),
"You are currently running with version %s of google-api-client. " +
"You need at least version 1.31.1 of google-api-client to run version " +
"1.32.1 of the Cloud Storage for Firebase API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
}
/**
* The default encoded root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_ROOT_URL = "https://firebasestorage.googleapis.com/";
/**
* The default encoded mTLS root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.31
*/
public static final String DEFAULT_MTLS_ROOT_URL = "https://firebasestorage.mtls.googleapis.com/";
/**
* The default encoded service path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_SERVICE_PATH = "";
/**
* The default encoded batch path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.23
*/
public static final String DEFAULT_BATCH_PATH = "batch";
/**
* The default encoded base URL of the service. This is determined when the library is generated
* and normally should not be changed.
*/
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
* Constructor.
*
* <p>
* Use {@link Builder} if you need to specify any of the optional parameters.
* </p>
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
  public Firebasestorage(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    // Delegates to the Builder-based constructor with default settings.
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }
  /**
   * Constructs the service from a fully configured {@link Builder}.
   *
   * @param builder builder carrying transport, JSON factory, endpoint and
   *        initializer settings
   */
  Firebasestorage(Builder builder) {
    super(builder);
  }
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    // Per-request initialization hook; delegates to the superclass
    // implementation without adding service-specific behavior.
    super.initialize(httpClientRequest);
  }
/**
* An accessor for creating requests from the Projects collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code Firebasestorage firebasestorage = new Firebasestorage(...);}
* {@code Firebasestorage.Projects.List request = firebasestorage.projects().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
  public Projects projects() {
    // Accessors are stateless; a fresh collection wrapper is returned per call.
    return new Projects();
  }
/**
* The "projects" collection of methods.
*/
public class Projects {
/**
* An accessor for creating requests from the Buckets collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code Firebasestorage firebasestorage = new Firebasestorage(...);}
* {@code Firebasestorage.Buckets.List request = firebasestorage.buckets().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
    public Buckets buckets() {
      // Accessors are stateless; a fresh collection wrapper is returned per call.
      return new Buckets();
    }
/**
* The "buckets" collection of methods.
*/
public class Buckets {
/**
* Links a Google Cloud Storage bucket to a Firebase project.
*
* Create a request for the method "buckets.addFirebase".
*
* This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link AddFirebase#execute()} method to invoke the remote
* operation.
*
* @param bucket Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage bucket,
* `projects/{project_number}/buckets/{bucket_id}`.
* @param content the {@link com.google.api.services.firebasestorage.v1beta.model.AddFirebaseRequest}
* @return the request
*/
public AddFirebase addFirebase(java.lang.String bucket, com.google.api.services.firebasestorage.v1beta.model.AddFirebaseRequest content) throws java.io.IOException {
AddFirebase result = new AddFirebase(bucket, content);
initialize(result);
return result;
}
public class AddFirebase extends FirebasestorageRequest<com.google.api.services.firebasestorage.v1beta.model.Bucket> {
private static final String REST_PATH = "v1beta/{+bucket}:addFirebase";
private final java.util.regex.Pattern BUCKET_PATTERN =
java.util.regex.Pattern.compile("^projects/[^/]+/buckets/[^/]+$");
/**
* Links a Google Cloud Storage bucket to a Firebase project.
*
* Create a request for the method "buckets.addFirebase".
*
   * This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link AddFirebase#execute()} method to invoke the remote
* operation. <p> {@link
* AddFirebase#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param bucket Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage bucket,
* `projects/{project_number}/buckets/{bucket_id}`.
* @param content the {@link com.google.api.services.firebasestorage.v1beta.model.AddFirebaseRequest}
* @since 1.13
*/
protected AddFirebase(java.lang.String bucket, com.google.api.services.firebasestorage.v1beta.model.AddFirebaseRequest content) {
super(Firebasestorage.this, "POST", REST_PATH, content, com.google.api.services.firebasestorage.v1beta.model.Bucket.class);
this.bucket = com.google.api.client.util.Preconditions.checkNotNull(bucket, "Required parameter bucket must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(BUCKET_PATTERN.matcher(bucket).matches(),
"Parameter bucket must conform to the pattern " +
"^projects/[^/]+/buckets/[^/]+$");
}
}
@Override
public AddFirebase set$Xgafv(java.lang.String $Xgafv) {
return (AddFirebase) super.set$Xgafv($Xgafv);
}
@Override
public AddFirebase setAccessToken(java.lang.String accessToken) {
return (AddFirebase) super.setAccessToken(accessToken);
}
@Override
public AddFirebase setAlt(java.lang.String alt) {
return (AddFirebase) super.setAlt(alt);
}
@Override
public AddFirebase setCallback(java.lang.String callback) {
return (AddFirebase) super.setCallback(callback);
}
@Override
public AddFirebase setFields(java.lang.String fields) {
return (AddFirebase) super.setFields(fields);
}
@Override
public AddFirebase setKey(java.lang.String key) {
return (AddFirebase) super.setKey(key);
}
@Override
public AddFirebase setOauthToken(java.lang.String oauthToken) {
return (AddFirebase) super.setOauthToken(oauthToken);
}
@Override
public AddFirebase setPrettyPrint(java.lang.Boolean prettyPrint) {
return (AddFirebase) super.setPrettyPrint(prettyPrint);
}
@Override
public AddFirebase setQuotaUser(java.lang.String quotaUser) {
return (AddFirebase) super.setQuotaUser(quotaUser);
}
@Override
public AddFirebase setUploadType(java.lang.String uploadType) {
return (AddFirebase) super.setUploadType(uploadType);
}
@Override
public AddFirebase setUploadProtocol(java.lang.String uploadProtocol) {
return (AddFirebase) super.setUploadProtocol(uploadProtocol);
}
/**
* Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud
* Storage bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
@com.google.api.client.util.Key
private java.lang.String bucket;
/** Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage
bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
public java.lang.String getBucket() {
return bucket;
}
/**
* Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud
* Storage bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
public AddFirebase setBucket(java.lang.String bucket) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(BUCKET_PATTERN.matcher(bucket).matches(),
"Parameter bucket must conform to the pattern " +
"^projects/[^/]+/buckets/[^/]+$");
}
this.bucket = bucket;
return this;
}
@Override
public AddFirebase set(String parameterName, Object value) {
return (AddFirebase) super.set(parameterName, value);
}
}
/**
* Gets a single linked storage bucket.
*
* Create a request for the method "buckets.get".
*
* This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link Get#execute()} method to invoke the remote operation.
*
* @param name Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage bucket,
* `projects/{project_number}/buckets/{bucket_id}`.
* @return the request
*/
public Get get(java.lang.String name) throws java.io.IOException {
Get result = new Get(name);
initialize(result);
return result;
}
public class Get extends FirebasestorageRequest<com.google.api.services.firebasestorage.v1beta.model.Bucket> {
private static final String REST_PATH = "v1beta/{+name}";
private final java.util.regex.Pattern NAME_PATTERN =
java.util.regex.Pattern.compile("^projects/[^/]+/buckets/[^/]+$");
/**
* Gets a single linked storage bucket.
*
* Create a request for the method "buckets.get".
*
     * This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link Get#execute()} method to invoke the remote operation. <p>
* {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param name Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage bucket,
* `projects/{project_number}/buckets/{bucket_id}`.
* @since 1.13
*/
protected Get(java.lang.String name) {
super(Firebasestorage.this, "GET", REST_PATH, null, com.google.api.services.firebasestorage.v1beta.model.Bucket.class);
this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^projects/[^/]+/buckets/[^/]+$");
}
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public Get set$Xgafv(java.lang.String $Xgafv) {
return (Get) super.set$Xgafv($Xgafv);
}
@Override
public Get setAccessToken(java.lang.String accessToken) {
return (Get) super.setAccessToken(accessToken);
}
@Override
public Get setAlt(java.lang.String alt) {
return (Get) super.setAlt(alt);
}
@Override
public Get setCallback(java.lang.String callback) {
return (Get) super.setCallback(callback);
}
@Override
public Get setFields(java.lang.String fields) {
return (Get) super.setFields(fields);
}
@Override
public Get setKey(java.lang.String key) {
return (Get) super.setKey(key);
}
@Override
public Get setOauthToken(java.lang.String oauthToken) {
return (Get) super.setOauthToken(oauthToken);
}
@Override
public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Get) super.setPrettyPrint(prettyPrint);
}
@Override
public Get setQuotaUser(java.lang.String quotaUser) {
return (Get) super.setQuotaUser(quotaUser);
}
@Override
public Get setUploadType(java.lang.String uploadType) {
return (Get) super.setUploadType(uploadType);
}
@Override
public Get setUploadProtocol(java.lang.String uploadProtocol) {
return (Get) super.setUploadProtocol(uploadProtocol);
}
/**
* Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud
* Storage bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/** Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage
bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
public java.lang.String getName() {
return name;
}
/**
* Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud
* Storage bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
public Get setName(java.lang.String name) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^projects/[^/]+/buckets/[^/]+$");
}
this.name = name;
return this;
}
@Override
public Get set(String parameterName, Object value) {
return (Get) super.set(parameterName, value);
}
}
/**
* Lists the linked storage buckets for a project.
*
* Create a request for the method "buckets.list".
*
* This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation.
*
* @param parent Required. Resource name of the parent Firebase project, `projects/{project_number}`.
* @return the request
*/
public List list(java.lang.String parent) throws java.io.IOException {
List result = new List(parent);
initialize(result);
return result;
}
public class List extends FirebasestorageRequest<com.google.api.services.firebasestorage.v1beta.model.ListBucketsResponse> {
private static final String REST_PATH = "v1beta/{+parent}/buckets";
private final java.util.regex.Pattern PARENT_PATTERN =
java.util.regex.Pattern.compile("^projects/[^/]+$");
/**
* Lists the linked storage buckets for a project.
*
* Create a request for the method "buckets.list".
*
     * This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
* {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param parent Required. Resource name of the parent Firebase project, `projects/{project_number}`.
* @since 1.13
*/
protected List(java.lang.String parent) {
super(Firebasestorage.this, "GET", REST_PATH, null, com.google.api.services.firebasestorage.v1beta.model.ListBucketsResponse.class);
this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
"Parameter parent must conform to the pattern " +
"^projects/[^/]+$");
}
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public List set$Xgafv(java.lang.String $Xgafv) {
return (List) super.set$Xgafv($Xgafv);
}
@Override
public List setAccessToken(java.lang.String accessToken) {
return (List) super.setAccessToken(accessToken);
}
@Override
public List setAlt(java.lang.String alt) {
return (List) super.setAlt(alt);
}
@Override
public List setCallback(java.lang.String callback) {
return (List) super.setCallback(callback);
}
@Override
public List setFields(java.lang.String fields) {
return (List) super.setFields(fields);
}
@Override
public List setKey(java.lang.String key) {
return (List) super.setKey(key);
}
@Override
public List setOauthToken(java.lang.String oauthToken) {
return (List) super.setOauthToken(oauthToken);
}
@Override
public List setPrettyPrint(java.lang.Boolean prettyPrint) {
return (List) super.setPrettyPrint(prettyPrint);
}
@Override
public List setQuotaUser(java.lang.String quotaUser) {
return (List) super.setQuotaUser(quotaUser);
}
@Override
public List setUploadType(java.lang.String uploadType) {
return (List) super.setUploadType(uploadType);
}
@Override
public List setUploadProtocol(java.lang.String uploadProtocol) {
return (List) super.setUploadProtocol(uploadProtocol);
}
/** Required. Resource name of the parent Firebase project, `projects/{project_number}`. */
@com.google.api.client.util.Key
private java.lang.String parent;
/** Required. Resource name of the parent Firebase project, `projects/{project_number}`.
*/
public java.lang.String getParent() {
return parent;
}
/** Required. Resource name of the parent Firebase project, `projects/{project_number}`. */
public List setParent(java.lang.String parent) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
"Parameter parent must conform to the pattern " +
"^projects/[^/]+$");
}
this.parent = parent;
return this;
}
/**
* The maximum number of buckets to return. If not set, the server will use a reasonable
* default.
*/
@com.google.api.client.util.Key
private java.lang.Integer pageSize;
/** The maximum number of buckets to return. If not set, the server will use a reasonable default.
*/
public java.lang.Integer getPageSize() {
return pageSize;
}
/**
* The maximum number of buckets to return. If not set, the server will use a reasonable
* default.
*/
public List setPageSize(java.lang.Integer pageSize) {
this.pageSize = pageSize;
return this;
}
/**
* A page token, received from a previous `ListBuckets` call. Provide this to retrieve the
* subsequent page. When paginating, all other parameters provided to `ListBuckets` must
* match the call that provided the page token.
*/
@com.google.api.client.util.Key
private java.lang.String pageToken;
/** A page token, received from a previous `ListBuckets` call. Provide this to retrieve the subsequent
page. When paginating, all other parameters provided to `ListBuckets` must match the call that
provided the page token.
*/
public java.lang.String getPageToken() {
return pageToken;
}
/**
* A page token, received from a previous `ListBuckets` call. Provide this to retrieve the
* subsequent page. When paginating, all other parameters provided to `ListBuckets` must
* match the call that provided the page token.
*/
public List setPageToken(java.lang.String pageToken) {
this.pageToken = pageToken;
return this;
}
@Override
public List set(String parameterName, Object value) {
return (List) super.set(parameterName, value);
}
}
/**
* Unlinks a linked Google Cloud Storage bucket from a Firebase project.
*
* Create a request for the method "buckets.removeFirebase".
*
* This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link RemoveFirebase#execute()} method to invoke the remote
* operation.
*
* @param bucket Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage bucket,
* `projects/{project_number}/buckets/{bucket_id}`.
* @param content the {@link com.google.api.services.firebasestorage.v1beta.model.RemoveFirebaseRequest}
* @return the request
*/
public RemoveFirebase removeFirebase(java.lang.String bucket, com.google.api.services.firebasestorage.v1beta.model.RemoveFirebaseRequest content) throws java.io.IOException {
RemoveFirebase result = new RemoveFirebase(bucket, content);
initialize(result);
return result;
}
public class RemoveFirebase extends FirebasestorageRequest<com.google.api.services.firebasestorage.v1beta.model.Empty> {
private static final String REST_PATH = "v1beta/{+bucket}:removeFirebase";
private final java.util.regex.Pattern BUCKET_PATTERN =
java.util.regex.Pattern.compile("^projects/[^/]+/buckets/[^/]+$");
/**
* Unlinks a linked Google Cloud Storage bucket from a Firebase project.
*
* Create a request for the method "buckets.removeFirebase".
*
     * This request holds the parameters needed by the firebasestorage server. After setting any
* optional parameters, call the {@link RemoveFirebase#execute()} method to invoke the remote
* operation. <p> {@link RemoveFirebase#initialize(com.google.api.client.googleapis.services.Abstr
* actGoogleClientRequest)} must be called to initialize this instance immediately after invoking
* the constructor. </p>
*
* @param bucket Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage bucket,
* `projects/{project_number}/buckets/{bucket_id}`.
* @param content the {@link com.google.api.services.firebasestorage.v1beta.model.RemoveFirebaseRequest}
* @since 1.13
*/
protected RemoveFirebase(java.lang.String bucket, com.google.api.services.firebasestorage.v1beta.model.RemoveFirebaseRequest content) {
super(Firebasestorage.this, "POST", REST_PATH, content, com.google.api.services.firebasestorage.v1beta.model.Empty.class);
this.bucket = com.google.api.client.util.Preconditions.checkNotNull(bucket, "Required parameter bucket must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(BUCKET_PATTERN.matcher(bucket).matches(),
"Parameter bucket must conform to the pattern " +
"^projects/[^/]+/buckets/[^/]+$");
}
}
@Override
public RemoveFirebase set$Xgafv(java.lang.String $Xgafv) {
return (RemoveFirebase) super.set$Xgafv($Xgafv);
}
@Override
public RemoveFirebase setAccessToken(java.lang.String accessToken) {
return (RemoveFirebase) super.setAccessToken(accessToken);
}
@Override
public RemoveFirebase setAlt(java.lang.String alt) {
return (RemoveFirebase) super.setAlt(alt);
}
@Override
public RemoveFirebase setCallback(java.lang.String callback) {
return (RemoveFirebase) super.setCallback(callback);
}
@Override
public RemoveFirebase setFields(java.lang.String fields) {
return (RemoveFirebase) super.setFields(fields);
}
@Override
public RemoveFirebase setKey(java.lang.String key) {
return (RemoveFirebase) super.setKey(key);
}
@Override
public RemoveFirebase setOauthToken(java.lang.String oauthToken) {
return (RemoveFirebase) super.setOauthToken(oauthToken);
}
@Override
public RemoveFirebase setPrettyPrint(java.lang.Boolean prettyPrint) {
return (RemoveFirebase) super.setPrettyPrint(prettyPrint);
}
@Override
public RemoveFirebase setQuotaUser(java.lang.String quotaUser) {
return (RemoveFirebase) super.setQuotaUser(quotaUser);
}
@Override
public RemoveFirebase setUploadType(java.lang.String uploadType) {
return (RemoveFirebase) super.setUploadType(uploadType);
}
@Override
public RemoveFirebase setUploadProtocol(java.lang.String uploadProtocol) {
return (RemoveFirebase) super.setUploadProtocol(uploadProtocol);
}
/**
* Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud
* Storage bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
@com.google.api.client.util.Key
private java.lang.String bucket;
/** Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud Storage
bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
public java.lang.String getBucket() {
return bucket;
}
/**
* Required. Resource name of the bucket, mirrors the ID of the underlying Google Cloud
* Storage bucket, `projects/{project_number}/buckets/{bucket_id}`.
*/
public RemoveFirebase setBucket(java.lang.String bucket) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(BUCKET_PATTERN.matcher(bucket).matches(),
"Parameter bucket must conform to the pattern " +
"^projects/[^/]+/buckets/[^/]+$");
}
this.bucket = bucket;
return this;
}
@Override
public RemoveFirebase set(String parameterName, Object value) {
return (RemoveFirebase) super.set(parameterName, value);
}
}
}
}
/**
* Builder for {@link Firebasestorage}.
*
* <p>
* Implementation is not thread-safe.
* </p>
*
* @since 1.3.0
*/
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
// If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
// If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
// Use the regular endpoint for all other cases.
String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
return DEFAULT_MTLS_ROOT_URL;
}
return DEFAULT_ROOT_URL;
}
/**
* Returns an instance of a new builder.
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
super(
transport,
jsonFactory,
Builder.chooseEndpoint(transport),
DEFAULT_SERVICE_PATH,
httpRequestInitializer,
false);
setBatchPath(DEFAULT_BATCH_PATH);
}
/** Builds a new instance of {@link Firebasestorage}. */
@Override
public Firebasestorage build() {
return new Firebasestorage(this);
}
@Override
public Builder setRootUrl(String rootUrl) {
return (Builder) super.setRootUrl(rootUrl);
}
@Override
public Builder setServicePath(String servicePath) {
return (Builder) super.setServicePath(servicePath);
}
@Override
public Builder setBatchPath(String batchPath) {
return (Builder) super.setBatchPath(batchPath);
}
@Override
public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
}
@Override
public Builder setApplicationName(String applicationName) {
return (Builder) super.setApplicationName(applicationName);
}
@Override
public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
}
@Override
public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
}
@Override
public Builder setSuppressAllChecks(boolean suppressAllChecks) {
return (Builder) super.setSuppressAllChecks(suppressAllChecks);
}
/**
* Set the {@link FirebasestorageRequestInitializer}.
*
* @since 1.12
*/
public Builder setFirebasestorageRequestInitializer(
FirebasestorageRequestInitializer firebasestorageRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(firebasestorageRequestInitializer);
}
@Override
public Builder setGoogleClientRequestInitializer(
com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
}
}
}
|
googleapis/google-cloud-java | 35,947 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/MutateDeployedIndexRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/index_endpoint_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Request message for
* [IndexEndpointService.MutateDeployedIndex][google.cloud.aiplatform.v1.IndexEndpointService.MutateDeployedIndex].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.MutateDeployedIndexRequest}
*/
public final class MutateDeployedIndexRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.MutateDeployedIndexRequest)
MutateDeployedIndexRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use MutateDeployedIndexRequest.newBuilder() to construct.
private MutateDeployedIndexRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MutateDeployedIndexRequest() {
indexEndpoint_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MutateDeployedIndexRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_MutateDeployedIndexRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_MutateDeployedIndexRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest.class,
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest.Builder.class);
}
private int bitField0_;
public static final int INDEX_ENDPOINT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object indexEndpoint_ = "";
/**
*
*
* <pre>
* Required. The name of the IndexEndpoint resource into which to deploy an
* Index. Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The indexEndpoint.
*/
@java.lang.Override
public java.lang.String getIndexEndpoint() {
java.lang.Object ref = indexEndpoint_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
indexEndpoint_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the IndexEndpoint resource into which to deploy an
* Index. Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for indexEndpoint.
*/
@java.lang.Override
public com.google.protobuf.ByteString getIndexEndpointBytes() {
java.lang.Object ref = indexEndpoint_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
indexEndpoint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DEPLOYED_INDEX_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1.DeployedIndex deployedIndex_;
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the deployedIndex field is set.
*/
@java.lang.Override
public boolean hasDeployedIndex() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The deployedIndex.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.DeployedIndex getDeployedIndex() {
return deployedIndex_ == null
? com.google.cloud.aiplatform.v1.DeployedIndex.getDefaultInstance()
: deployedIndex_;
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.DeployedIndexOrBuilder getDeployedIndexOrBuilder() {
return deployedIndex_ == null
? com.google.cloud.aiplatform.v1.DeployedIndex.getDefaultInstance()
: deployedIndex_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(indexEndpoint_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, indexEndpoint_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getDeployedIndex());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(indexEndpoint_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, indexEndpoint_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDeployedIndex());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest other =
(com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest) obj;
if (!getIndexEndpoint().equals(other.getIndexEndpoint())) return false;
if (hasDeployedIndex() != other.hasDeployedIndex()) return false;
if (hasDeployedIndex()) {
if (!getDeployedIndex().equals(other.getDeployedIndex())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + INDEX_ENDPOINT_FIELD_NUMBER;
hash = (53 * hash) + getIndexEndpoint().hashCode();
if (hasDeployedIndex()) {
hash = (37 * hash) + DEPLOYED_INDEX_FIELD_NUMBER;
hash = (53 * hash) + getDeployedIndex().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [IndexEndpointService.MutateDeployedIndex][google.cloud.aiplatform.v1.IndexEndpointService.MutateDeployedIndex].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.MutateDeployedIndexRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.MutateDeployedIndexRequest)
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_MutateDeployedIndexRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_MutateDeployedIndexRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest.class,
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getDeployedIndexFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
indexEndpoint_ = "";
deployedIndex_ = null;
if (deployedIndexBuilder_ != null) {
deployedIndexBuilder_.dispose();
deployedIndexBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.IndexEndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_MutateDeployedIndexRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest build() {
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest buildPartial() {
com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest result =
new com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.indexEndpoint_ = indexEndpoint_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.deployedIndex_ =
deployedIndexBuilder_ == null ? deployedIndex_ : deployedIndexBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest other) {
if (other == com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest.getDefaultInstance())
return this;
if (!other.getIndexEndpoint().isEmpty()) {
indexEndpoint_ = other.indexEndpoint_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasDeployedIndex()) {
mergeDeployedIndex(other.getDeployedIndex());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
indexEndpoint_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getDeployedIndexFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object indexEndpoint_ = "";
/**
*
*
* <pre>
* Required. The name of the IndexEndpoint resource into which to deploy an
* Index. Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The indexEndpoint.
*/
public java.lang.String getIndexEndpoint() {
java.lang.Object ref = indexEndpoint_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
indexEndpoint_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the IndexEndpoint resource into which to deploy an
* Index. Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for indexEndpoint.
*/
public com.google.protobuf.ByteString getIndexEndpointBytes() {
java.lang.Object ref = indexEndpoint_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
indexEndpoint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the IndexEndpoint resource into which to deploy an
* Index. Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The indexEndpoint to set.
* @return This builder for chaining.
*/
public Builder setIndexEndpoint(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
indexEndpoint_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the IndexEndpoint resource into which to deploy an
* Index. Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearIndexEndpoint() {
indexEndpoint_ = getDefaultInstance().getIndexEndpoint();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the IndexEndpoint resource into which to deploy an
* Index. Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for indexEndpoint to set.
* @return This builder for chaining.
*/
public Builder setIndexEndpointBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
indexEndpoint_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.aiplatform.v1.DeployedIndex deployedIndex_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.DeployedIndex,
com.google.cloud.aiplatform.v1.DeployedIndex.Builder,
com.google.cloud.aiplatform.v1.DeployedIndexOrBuilder>
deployedIndexBuilder_;
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the deployedIndex field is set.
*/
public boolean hasDeployedIndex() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The deployedIndex.
*/
public com.google.cloud.aiplatform.v1.DeployedIndex getDeployedIndex() {
if (deployedIndexBuilder_ == null) {
return deployedIndex_ == null
? com.google.cloud.aiplatform.v1.DeployedIndex.getDefaultInstance()
: deployedIndex_;
} else {
return deployedIndexBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDeployedIndex(com.google.cloud.aiplatform.v1.DeployedIndex value) {
if (deployedIndexBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
deployedIndex_ = value;
} else {
deployedIndexBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDeployedIndex(
com.google.cloud.aiplatform.v1.DeployedIndex.Builder builderForValue) {
if (deployedIndexBuilder_ == null) {
deployedIndex_ = builderForValue.build();
} else {
deployedIndexBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeDeployedIndex(com.google.cloud.aiplatform.v1.DeployedIndex value) {
if (deployedIndexBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& deployedIndex_ != null
&& deployedIndex_
!= com.google.cloud.aiplatform.v1.DeployedIndex.getDefaultInstance()) {
getDeployedIndexBuilder().mergeFrom(value);
} else {
deployedIndex_ = value;
}
} else {
deployedIndexBuilder_.mergeFrom(value);
}
if (deployedIndex_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDeployedIndex() {
bitField0_ = (bitField0_ & ~0x00000002);
deployedIndex_ = null;
if (deployedIndexBuilder_ != null) {
deployedIndexBuilder_.dispose();
deployedIndexBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.DeployedIndex.Builder getDeployedIndexBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getDeployedIndexFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.DeployedIndexOrBuilder getDeployedIndexOrBuilder() {
if (deployedIndexBuilder_ != null) {
return deployedIndexBuilder_.getMessageOrBuilder();
} else {
return deployedIndex_ == null
? com.google.cloud.aiplatform.v1.DeployedIndex.getDefaultInstance()
: deployedIndex_;
}
}
/**
*
*
* <pre>
* Required. The DeployedIndex to be updated within the IndexEndpoint.
* Currently, the updatable fields are
* [DeployedIndex.automatic_resources][google.cloud.aiplatform.v1.DeployedIndex.automatic_resources]
* and
* [DeployedIndex.dedicated_resources][google.cloud.aiplatform.v1.DeployedIndex.dedicated_resources]
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.DeployedIndex deployed_index = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.DeployedIndex,
com.google.cloud.aiplatform.v1.DeployedIndex.Builder,
com.google.cloud.aiplatform.v1.DeployedIndexOrBuilder>
getDeployedIndexFieldBuilder() {
if (deployedIndexBuilder_ == null) {
deployedIndexBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.DeployedIndex,
com.google.cloud.aiplatform.v1.DeployedIndex.Builder,
com.google.cloud.aiplatform.v1.DeployedIndexOrBuilder>(
getDeployedIndex(), getParentForChildren(), isClean());
deployedIndex_ = null;
}
return deployedIndexBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.MutateDeployedIndexRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.MutateDeployedIndexRequest)
  // Shared immutable default instance, returned by getDefaultInstance() and
  // getDefaultInstanceForType().
  private static final com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest();
  }

  public static com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that deserializes this message from a CodedInputStream. On any parse failure the
  // partially built message is attached to the thrown InvalidProtocolBufferException so callers
  // can inspect what was read before the failure.
  private static final com.google.protobuf.Parser<MutateDeployedIndexRequest> PARSER =
      new com.google.protobuf.AbstractParser<MutateDeployedIndexRequest>() {
        @java.lang.Override
        public MutateDeployedIndexRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the API surface only exposes protobuf exceptions.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<MutateDeployedIndexRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MutateDeployedIndexRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.MutateDeployedIndexRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/flink | 36,183 | flink-datastream/src/main/java/org/apache/flink/datastream/impl/extension/window/operators/TwoInputNonBroadcastWindowProcessOperator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.datastream.impl.extension.window.operators;
import org.apache.flink.api.common.state.v2.AppendingState;
import org.apache.flink.api.common.state.v2.ListStateDescriptor;
import org.apache.flink.api.common.state.v2.StateDescriptor;
import org.apache.flink.api.common.state.v2.StateIterator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.runtime.TupleSerializer;
import org.apache.flink.datastream.api.context.ProcessingTimeManager;
import org.apache.flink.datastream.api.extension.window.function.TwoInputNonBroadcastWindowStreamProcessFunction;
import org.apache.flink.datastream.api.stream.KeyedPartitionStream;
import org.apache.flink.datastream.impl.context.UnsupportedProcessingTimeManager;
import org.apache.flink.datastream.impl.extension.window.context.DefaultTwoInputWindowContext;
import org.apache.flink.datastream.impl.extension.window.context.WindowTriggerContext;
import org.apache.flink.datastream.impl.extension.window.function.InternalTwoInputWindowStreamProcessFunction;
import org.apache.flink.datastream.impl.extension.window.utils.WindowUtils;
import org.apache.flink.datastream.impl.operators.BaseKeyedTwoInputNonBroadcastProcessOperator;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.runtime.state.v2.internal.InternalAppendingState;
import org.apache.flink.runtime.state.v2.internal.InternalListState;
import org.apache.flink.runtime.state.v2.internal.InternalMergingState;
import org.apache.flink.streaming.api.operators.InternalTimer;
import org.apache.flink.streaming.api.operators.InternalTimerService;
import org.apache.flink.streaming.api.operators.Triggerable;
import org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner;
import org.apache.flink.streaming.api.windowing.assigners.WindowAssigner;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
import org.apache.flink.streaming.api.windowing.windows.Window;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.util.TaggedUnion;
import java.util.Collection;
import static org.apache.flink.util.Preconditions.checkArgument;
/**
* Operator for {@link TwoInputNonBroadcastWindowStreamProcessFunction} in {@link
* KeyedPartitionStream}.
*/
public class TwoInputNonBroadcastWindowProcessOperator<K, IN1, IN2, OUT, W extends Window>
extends BaseKeyedTwoInputNonBroadcastProcessOperator<K, IN1, IN2, OUT>
implements Triggerable<K, W> {
private static final long serialVersionUID = 1L;
// ------------------------------------------------------------------------
// Configuration values and user functions
// ------------------------------------------------------------------------
private final TwoInputNonBroadcastWindowStreamProcessFunction<IN1, IN2, OUT>
windowProcessFunction;
/**
* The allowed lateness for elements. This is used for:
*
* <ul>
* <li>Deciding if an element should be dropped from a window due to lateness.
* <li>Clearing the state of a window if the time out-of the {@code window.maxTimestamp +
* allowedLateness} landmark.
* </ul>
*/
protected final long allowedLateness;
// ------------------------------------------------------------------------
// Operator components
// ------------------------------------------------------------------------
protected transient InternalTimerService<W> internalTimerService;
/** For serializing the window in checkpoints. */
private final TypeSerializer<W> windowSerializer;
// ------------------------------------------------------------------------
// Window assigner and trigger
// ------------------------------------------------------------------------
private final WindowAssigner<? super TaggedUnion<IN1, IN2>, W> windowAssigner;
private transient WindowAssigner.WindowAssignerContext windowAssignerContext;
private final Trigger<? super TaggedUnion<IN1, IN2>, ? super W> trigger;
private transient WindowTriggerContext<K, ? super TaggedUnion<IN1, IN2>, W> triggerContext;
private transient DefaultTwoInputWindowContext<K, IN1, IN2, W> windowFunctionContext;
// ------------------------------------------------------------------------
// State that is not checkpointed
// ------------------------------------------------------------------------
private final StateDescriptor<IN1> leftWindowStateDescriptor;
private final StateDescriptor<IN2> rightWindowStateDescriptor;
/**
* The state in which the window contents from the left input are stored. Each window is a
* namespace.
*/
private transient InternalAppendingState<K, W, IN1, IN1, StateIterator<IN1>, Iterable<IN1>>
leftWindowState;
/**
* The state in which the window contents from the right input are stored. Each window is a
* namespace
*/
private transient InternalAppendingState<K, W, IN2, IN2, StateIterator<IN2>, Iterable<IN2>>
rightWindowState;
/**
* The {@link #leftWindowState}, typed to merging state for merging windows. Null if the window
* state is not mergeable.
*/
private transient InternalMergingState<K, W, IN1, IN1, StateIterator<IN1>, Iterable<IN1>>
leftWindowMergingState;
/**
* The {@link #rightWindowState}, typed to merging state for merging windows. Null if the window
* state is not mergeable.
*/
private transient InternalMergingState<K, W, IN2, IN2, StateIterator<IN2>, Iterable<IN2>>
rightWindowMergingState;
/** The state that holds the merging window metadata (the sets that describe what is merged). */
private transient InternalListState<K, VoidNamespace, Tuple2<W, W>> mergingSetsState;
public TwoInputNonBroadcastWindowProcessOperator(
InternalTwoInputWindowStreamProcessFunction<IN1, IN2, OUT, W> windowFunction,
WindowAssigner<? super TaggedUnion<IN1, IN2>, W> windowAssigner,
Trigger<? super TaggedUnion<IN1, IN2>, ? super W> trigger,
TypeSerializer<W> windowSerializer,
StateDescriptor<IN1> leftWindowStateDescriptor,
StateDescriptor<IN2> rightWindowStateDescriptor,
long allowedLateness) {
super(windowFunction);
checkArgument(allowedLateness >= 0);
this.windowProcessFunction = windowFunction.getWindowProcessFunction();
this.windowAssigner = windowAssigner;
this.trigger = trigger;
this.windowSerializer = windowSerializer;
this.leftWindowStateDescriptor = leftWindowStateDescriptor;
this.rightWindowStateDescriptor = rightWindowStateDescriptor;
this.allowedLateness = allowedLateness;
}
    /**
     * Initializes the operator: the internal timer service, the per-window left/right contents
     * state, the typed merging-state views and merging-set metadata state (only when the assigner
     * is a {@link MergingWindowAssigner}), and the trigger/assigner/window-function contexts.
     */
    @Override
    public void open() throws Exception {
        super.open();
        internalTimerService =
                getInternalTimerService("process-window-timers", windowSerializer, this);
        // create (or restore) the state that hold the actual window contents
        // NOTE - the state may be null in the case of the overriding evicting window operator
        if (leftWindowStateDescriptor != null) {
            leftWindowState =
                    getOrCreateKeyedState(
                            windowSerializer.createInstance(),
                            windowSerializer,
                            leftWindowStateDescriptor);
        }
        if (rightWindowStateDescriptor != null) {
            rightWindowState =
                    getOrCreateKeyedState(
                            windowSerializer.createInstance(),
                            windowSerializer,
                            rightWindowStateDescriptor);
        }
        // create the typed and helper states for merging windows
        if (windowAssigner instanceof MergingWindowAssigner) {
            // store a typed reference for the state of merging windows - sanity check:
            // a merging assigner requires mergeable window state (if any state exists at all)
            if (leftWindowState instanceof InternalMergingState) {
                leftWindowMergingState =
                        (InternalMergingState<K, W, IN1, IN1, StateIterator<IN1>, Iterable<IN1>>)
                                leftWindowState;
            } else if (leftWindowState != null) {
                throw new IllegalStateException(
                        "The window uses a merging assigner, but the window state is not mergeable.");
            }
            if (rightWindowState instanceof InternalMergingState) {
                rightWindowMergingState =
                        (InternalMergingState<K, W, IN2, IN2, StateIterator<IN2>, Iterable<IN2>>)
                                rightWindowState;
            } else if (rightWindowState != null) {
                throw new IllegalStateException(
                        "The window uses a merging assigner, but the window state is not mergeable.");
            }
            // The merging-set metadata maps each (logical window -> state window) as Tuple2 pairs.
            @SuppressWarnings("unchecked")
            final Class<Tuple2<W, W>> typedTuple = (Class<Tuple2<W, W>>) (Class<?>) Tuple2.class;
            final TupleSerializer<Tuple2<W, W>> tupleSerializer =
                    new TupleSerializer<>(
                            typedTuple, new TypeSerializer[] {windowSerializer, windowSerializer});
            final ListStateDescriptor<Tuple2<W, W>> mergingSetsStateDescriptor =
                    new ListStateDescriptor<>("merging-window-set", tupleSerializer);
            // get the state that stores the merging sets; it lives under the single void namespace
            mergingSetsState =
                    getOrCreateKeyedState(
                            VoidNamespaceSerializer.INSTANCE.createInstance(),
                            VoidNamespaceSerializer.INSTANCE,
                            mergingSetsStateDescriptor);
            mergingSetsState.setCurrentNamespace(VoidNamespace.INSTANCE);
        }
        // Key and window of the trigger context are set per element/timer before use.
        triggerContext =
                new WindowTriggerContext<>(
                        null, null, this, internalTimerService, trigger, windowSerializer);
        windowAssignerContext =
                new WindowAssigner.WindowAssignerContext() {
                    @Override
                    public long getCurrentProcessingTime() {
                        return internalTimerService.currentProcessingTime();
                    }
                };
        windowFunctionContext =
                new DefaultTwoInputWindowContext<>(
                        null,
                        leftWindowState,
                        rightWindowState,
                        windowProcessFunction,
                        this,
                        windowSerializer,
                        leftWindowMergingState != null);
    }
    /**
     * Processes an element from the first (left) input: assigns it to windows, stores it in the
     * left window state, evaluates the trigger, and fires/purges/cleans up as the trigger result
     * dictates. Elements too late for every assigned window are routed to {@code onLateRecord1}.
     */
    @Override
    public void processElement1(StreamRecord<IN1> element) throws Exception {
        final Collection<W> elementWindows =
                windowAssigner.assignWindows(
                        TaggedUnion.one(element.getValue()),
                        element.getTimestamp(),
                        windowAssignerContext);
        // if element is handled by none of assigned elementWindows
        boolean isSkippedElement = true;
        final K key = (K) this.getCurrentKey();
        if (windowAssigner instanceof MergingWindowAssigner) {
            MergingWindowSet<W> mergingWindows = getMergingWindowSet();
            for (W window : elementWindows) {
                // adding the new window might result in a merge, in that case the actualWindow
                // is the merged window and we work with that. If we don't merge then
                // actualWindow == window
                W actualWindow =
                        mergingWindows.addWindow(
                                window,
                                new MergingWindowSet.MergeFunction<>() {
                                    @Override
                                    public void merge(
                                            W mergeResult,
                                            Collection<W> mergedWindows,
                                            W stateWindowResult,
                                            Collection<W> mergedStateWindows)
                                            throws Exception {
                                        // A merge must never move a window's end behind the
                                        // current watermark / processing time, otherwise its
                                        // cleanup timer could never fire correctly.
                                        if ((windowAssigner.isEventTime()
                                                && mergeResult.maxTimestamp() + allowedLateness
                                                        <= internalTimerService
                                                                .currentWatermark())) {
                                            throw new UnsupportedOperationException(
                                                    "The end timestamp of an "
                                                            + "event-time window cannot become earlier than the current watermark "
                                                            + "by merging. Current event time: "
                                                            + internalTimerService
                                                                    .currentWatermark()
                                                            + " window: "
                                                            + mergeResult);
                                        } else if (!windowAssigner.isEventTime()) {
                                            long currentProcessingTime =
                                                    internalTimerService.currentProcessingTime();
                                            if (mergeResult.maxTimestamp()
                                                    <= currentProcessingTime) {
                                                throw new UnsupportedOperationException(
                                                        "The end timestamp of a "
                                                                + "processing-time window cannot become earlier than the current processing time "
                                                                + "by merging. Current processing time: "
                                                                + currentProcessingTime
                                                                + " window: "
                                                                + mergeResult);
                                            }
                                        }
                                        triggerContext.setKey(key);
                                        triggerContext.setWindow(mergeResult);
                                        triggerContext.onMerge(mergedWindows);
                                        // Clear trigger state and cleanup timers of the windows
                                        // that were absorbed by the merge.
                                        for (W m : mergedWindows) {
                                            triggerContext.setWindow(m);
                                            triggerContext.clear();
                                            WindowUtils.deleteCleanupTimer(
                                                    m,
                                                    windowAssigner,
                                                    triggerContext,
                                                    allowedLateness);
                                        }
                                        // merge the merged state windows into the newly resulting
                                        // state window
                                        leftWindowMergingState.mergeNamespaces(
                                                stateWindowResult, mergedStateWindows);
                                        rightWindowMergingState.mergeNamespaces(
                                                stateWindowResult, mergedStateWindows);
                                    }
                                });
                // drop if the window is already late
                if (WindowUtils.isWindowLate(
                        actualWindow, windowAssigner, internalTimerService, allowedLateness)) {
                    mergingWindows.retireWindow(actualWindow);
                    continue;
                }
                isSkippedElement = false;
                W stateWindow = mergingWindows.getStateWindow(actualWindow);
                if (stateWindow == null) {
                    throw new IllegalStateException(
                            "Window " + window + " is not in in-flight window set.");
                }
                leftWindowState.setCurrentNamespace(stateWindow);
                // NOTE(review): the collector timestamp and function context use the pre-merge
                // `window`, while the trigger uses `actualWindow` — confirm this asymmetry is
                // intended.
                collector.setTimestamp(window.maxTimestamp());
                windowFunctionContext.setWindow(window);
                windowProcessFunction.onRecord1(
                        element.getValue(), collector, partitionedContext, windowFunctionContext);
                triggerContext.setKey(key);
                triggerContext.setWindow(actualWindow);
                TriggerResult triggerResult =
                        triggerContext.onElement(
                                new StreamRecord<>(
                                        TaggedUnion.one(element.getValue()),
                                        element.getTimestamp()));
                if (triggerResult.isFire()) {
                    emitWindowContents(actualWindow);
                }
                if (triggerResult.isPurge()) {
                    leftWindowState.clear();
                    rightWindowState.clear();
                }
                WindowUtils.registerCleanupTimer(
                        actualWindow, windowAssigner, triggerContext, allowedLateness);
            }
            // need to make sure to update the merging state in state
            mergingWindows.persist();
        } else {
            for (W window : elementWindows) {
                // drop if the window is already late
                if (WindowUtils.isWindowLate(
                        window, windowAssigner, internalTimerService, allowedLateness)) {
                    continue;
                }
                isSkippedElement = false;
                leftWindowState.setCurrentNamespace(window);
                collector.setTimestamp(window.maxTimestamp());
                windowFunctionContext.setWindow(window);
                windowProcessFunction.onRecord1(
                        element.getValue(), collector, partitionedContext, windowFunctionContext);
                triggerContext.setKey(key);
                triggerContext.setWindow(window);
                TriggerResult triggerResult =
                        triggerContext.onElement(
                                new StreamRecord<>(
                                        TaggedUnion.one(element.getValue()),
                                        element.getTimestamp()));
                if (triggerResult.isFire()) {
                    emitWindowContents(window);
                }
                if (triggerResult.isPurge()) {
                    leftWindowState.clear();
                    rightWindowState.clear();
                }
                WindowUtils.registerCleanupTimer(
                        window, windowAssigner, triggerContext, allowedLateness);
            }
        }
        // If the element was handled by none of its windows, hand it to the late-record hook.
        // This only applies for event-time assigners when the element's timestamp is already
        // older than (current watermark - allowed lateness).
        if (isSkippedElement
                && WindowUtils.isElementLate(
                        element, windowAssigner, allowedLateness, internalTimerService)) {
            windowProcessFunction.onLateRecord1(element.getValue(), collector, partitionedContext);
        }
    }
    /**
     * Processes an element from the second (right) input. Mirrors {@code processElement1} but
     * stores into the right window state and dispatches to {@code onRecord2}/{@code
     * onLateRecord2}.
     */
    @Override
    public void processElement2(StreamRecord<IN2> element) throws Exception {
        final Collection<W> elementWindows =
                windowAssigner.assignWindows(
                        TaggedUnion.two(element.getValue()),
                        element.getTimestamp(),
                        windowAssignerContext);
        // if element is handled by none of assigned elementWindows
        boolean isSkippedElement = true;
        final K key = (K) this.getCurrentKey();
        if (windowAssigner instanceof MergingWindowAssigner) {
            MergingWindowSet<W> mergingWindows = getMergingWindowSet();
            for (W window : elementWindows) {
                // adding the new window might result in a merge, in that case the actualWindow
                // is the merged window and we work with that. If we don't merge then
                // actualWindow == window
                W actualWindow =
                        mergingWindows.addWindow(
                                window,
                                new MergingWindowSet.MergeFunction<>() {
                                    @Override
                                    public void merge(
                                            W mergeResult,
                                            Collection<W> mergedWindows,
                                            W stateWindowResult,
                                            Collection<W> mergedStateWindows)
                                            throws Exception {
                                        // A merge must never move a window's end behind the
                                        // current watermark / processing time.
                                        if ((windowAssigner.isEventTime()
                                                && mergeResult.maxTimestamp() + allowedLateness
                                                        <= internalTimerService
                                                                .currentWatermark())) {
                                            throw new UnsupportedOperationException(
                                                    "The end timestamp of an "
                                                            + "event-time window cannot become earlier than the current watermark "
                                                            + "by merging. Current event time: "
                                                            + internalTimerService
                                                                    .currentWatermark()
                                                            + " window: "
                                                            + mergeResult);
                                        } else if (!windowAssigner.isEventTime()) {
                                            long currentProcessingTime =
                                                    internalTimerService.currentProcessingTime();
                                            if (mergeResult.maxTimestamp()
                                                    <= currentProcessingTime) {
                                                throw new UnsupportedOperationException(
                                                        "The end timestamp of a "
                                                                + "processing-time window cannot become earlier than the current processing time "
                                                                + "by merging. Current processing time: "
                                                                + currentProcessingTime
                                                                + " window: "
                                                                + mergeResult);
                                            }
                                        }
                                        triggerContext.setKey(key);
                                        triggerContext.setWindow(mergeResult);
                                        triggerContext.onMerge(mergedWindows);
                                        // Clear trigger state and cleanup timers of the windows
                                        // that were absorbed by the merge.
                                        for (W m : mergedWindows) {
                                            triggerContext.setWindow(m);
                                            triggerContext.clear();
                                            WindowUtils.deleteCleanupTimer(
                                                    m,
                                                    windowAssigner,
                                                    triggerContext,
                                                    allowedLateness);
                                        }
                                        // merge the merged state windows into the newly resulting
                                        // state window
                                        leftWindowMergingState.mergeNamespaces(
                                                stateWindowResult, mergedStateWindows);
                                        rightWindowMergingState.mergeNamespaces(
                                                stateWindowResult, mergedStateWindows);
                                    }
                                });
                // drop if the window is already late
                if (WindowUtils.isWindowLate(
                        actualWindow, windowAssigner, internalTimerService, allowedLateness)) {
                    mergingWindows.retireWindow(actualWindow);
                    continue;
                }
                isSkippedElement = false;
                W stateWindow = mergingWindows.getStateWindow(actualWindow);
                if (stateWindow == null) {
                    throw new IllegalStateException(
                            "Window " + window + " is not in in-flight window set.");
                }
                rightWindowState.setCurrentNamespace(stateWindow);
                // NOTE(review): pre-merge `window` used for collector timestamp and function
                // context, `actualWindow` for the trigger — confirm intended (same as
                // processElement1).
                collector.setTimestamp(window.maxTimestamp());
                windowFunctionContext.setWindow(window);
                windowProcessFunction.onRecord2(
                        element.getValue(), collector, partitionedContext, windowFunctionContext);
                triggerContext.setKey(key);
                triggerContext.setWindow(actualWindow);
                TriggerResult triggerResult =
                        triggerContext.onElement(
                                new StreamRecord<>(
                                        TaggedUnion.two(element.getValue()),
                                        element.getTimestamp()));
                if (triggerResult.isFire()) {
                    emitWindowContents(actualWindow);
                }
                if (triggerResult.isPurge()) {
                    leftWindowState.clear();
                    rightWindowState.clear();
                }
                WindowUtils.registerCleanupTimer(
                        actualWindow, windowAssigner, triggerContext, allowedLateness);
            }
            // need to make sure to update the merging state in state
            mergingWindows.persist();
        } else {
            for (W window : elementWindows) {
                // drop if the window is already late
                if (WindowUtils.isWindowLate(
                        window, windowAssigner, internalTimerService, allowedLateness)) {
                    continue;
                }
                isSkippedElement = false;
                rightWindowState.setCurrentNamespace(window);
                collector.setTimestamp(window.maxTimestamp());
                windowFunctionContext.setWindow(window);
                windowProcessFunction.onRecord2(
                        element.getValue(), collector, partitionedContext, windowFunctionContext);
                triggerContext.setKey(key);
                triggerContext.setWindow(window);
                TriggerResult triggerResult =
                        triggerContext.onElement(
                                new StreamRecord<>(
                                        TaggedUnion.two(element.getValue()),
                                        element.getTimestamp()));
                if (triggerResult.isFire()) {
                    emitWindowContents(window);
                }
                if (triggerResult.isPurge()) {
                    leftWindowState.clear();
                    rightWindowState.clear();
                }
                WindowUtils.registerCleanupTimer(
                        window, windowAssigner, triggerContext, allowedLateness);
            }
        }
        // If the element was handled by none of its windows, hand it to the late-record hook.
        // This only applies for event-time assigners when the element's timestamp is already
        // older than (current watermark - allowed lateness).
        if (isSkippedElement
                && WindowUtils.isElementLate(
                        element, windowAssigner, allowedLateness, internalTimerService)) {
            windowProcessFunction.onLateRecord2(element.getValue(), collector, partitionedContext);
        }
    }
    /**
     * Handles an event-time timer: resolves the (possibly merged) state window, evaluates the
     * trigger, fires/purges as requested, and clears all window state once the cleanup time
     * ({@code maxTimestamp + allowedLateness}) has been reached.
     */
    @Override
    public void onEventTime(InternalTimer<K, W> timer) throws Exception {
        triggerContext.setKey(timer.getKey());
        triggerContext.setWindow(timer.getNamespace());
        MergingWindowSet<W> mergingWindows;
        if (windowAssigner instanceof MergingWindowAssigner) {
            mergingWindows = getMergingWindowSet();
            W stateWindow = mergingWindows.getStateWindow(triggerContext.getWindow());
            if (stateWindow == null) {
                // Timer firing for non-existent window, this can only happen if a
                // trigger did not clean up timers. We have already cleared the merging
                // window and therefore the Trigger state, however, so nothing to do.
                return;
            } else {
                leftWindowState.setCurrentNamespace(stateWindow);
                rightWindowState.setCurrentNamespace(stateWindow);
            }
        } else {
            leftWindowState.setCurrentNamespace(triggerContext.getWindow());
            rightWindowState.setCurrentNamespace(triggerContext.getWindow());
            mergingWindows = null;
        }
        TriggerResult triggerResult = triggerContext.onEventTime(timer.getTimestamp());
        if (triggerResult.isFire()) {
            emitWindowContents(triggerContext.getWindow());
        }
        if (triggerResult.isPurge()) {
            leftWindowState.clear();
            rightWindowState.clear();
        }
        // Full state cleanup only for event-time assigners whose cleanup time has arrived.
        if (windowAssigner.isEventTime()
                && WindowUtils.isCleanupTime(
                        triggerContext.getWindow(),
                        timer.getTimestamp(),
                        windowAssigner,
                        allowedLateness)) {
            clearAllState(
                    triggerContext.getWindow(), leftWindowState, rightWindowState, mergingWindows);
        }
        if (mergingWindows != null) {
            // need to make sure to update the merging state in state
            mergingWindows.persist();
        }
    }
    /**
     * Handles a processing-time timer. Mirrors {@link #onEventTime(InternalTimer)} but performs
     * the final state cleanup only for processing-time assigners.
     */
    @Override
    public void onProcessingTime(InternalTimer<K, W> timer) throws Exception {
        triggerContext.setKey(timer.getKey());
        triggerContext.setWindow(timer.getNamespace());
        MergingWindowSet<W> mergingWindows;
        if (windowAssigner instanceof MergingWindowAssigner) {
            mergingWindows = getMergingWindowSet();
            W stateWindow = mergingWindows.getStateWindow(triggerContext.getWindow());
            if (stateWindow == null) {
                // Timer firing for non-existent window, this can only happen if a
                // trigger did not clean up timers. We have already cleared the merging
                // window and therefore the Trigger state, however, so nothing to do.
                return;
            } else {
                leftWindowState.setCurrentNamespace(stateWindow);
                rightWindowState.setCurrentNamespace(stateWindow);
            }
        } else {
            leftWindowState.setCurrentNamespace(triggerContext.getWindow());
            rightWindowState.setCurrentNamespace(triggerContext.getWindow());
            mergingWindows = null;
        }
        TriggerResult triggerResult = triggerContext.onProcessingTime(timer.getTimestamp());
        if (triggerResult.isFire()) {
            emitWindowContents(triggerContext.getWindow());
        }
        if (triggerResult.isPurge()) {
            leftWindowState.clear();
            rightWindowState.clear();
        }
        // Full state cleanup only for processing-time assigners whose cleanup time has arrived.
        if (!windowAssigner.isEventTime()
                && WindowUtils.isCleanupTime(
                        triggerContext.getWindow(),
                        timer.getTimestamp(),
                        windowAssigner,
                        allowedLateness)) {
            clearAllState(
                    triggerContext.getWindow(), leftWindowState, rightWindowState, mergingWindows);
        }
        if (mergingWindows != null) {
            // need to make sure to update the merging state in state
            mergingWindows.persist();
        }
    }
    /**
     * Returns a processing-time manager that rejects user access; timers inside window operators
     * are managed by the operator itself.
     */
    @Override
    protected ProcessingTimeManager getProcessingTimeManager() {
        // we don't support user utilize processing time in window operators
        return UnsupportedProcessingTimeManager.INSTANCE;
    }
    /**
     * Drops all state for the given window and calls {@link Trigger#clear(Window,
     * Trigger.TriggerContext)}.
     *
     * <p>The caller must ensure that the correct key is set in the state backend and the
     * triggerContext object.
     *
     * @param window the window whose state is dropped
     * @param leftWindowState left-input contents state, already scoped to the window's namespace
     * @param rightWindowState right-input contents state, already scoped to the window's namespace
     * @param mergingWindows merging metadata to retire the window from; null for non-merging
     *     assigners
     */
    private void clearAllState(
            W window,
            AppendingState<IN1, StateIterator<IN1>, Iterable<IN1>> leftWindowState,
            AppendingState<IN2, StateIterator<IN2>, Iterable<IN2>> rightWindowState,
            MergingWindowSet<W> mergingWindows)
            throws Exception {
        leftWindowState.clear();
        rightWindowState.clear();
        triggerContext.clear();
        // Give the user function a last chance to react to the window being discarded.
        windowFunctionContext.setWindow(window);
        windowProcessFunction.onClear(collector, partitionedContext, windowFunctionContext);
        if (mergingWindows != null) {
            mergingWindows.retireWindow(window);
            mergingWindows.persist();
        }
    }
    /**
     * Emits the contents of the given window using the user-defined {@link
     * TwoInputNonBroadcastWindowStreamProcessFunction}.
     *
     * @param window the window whose contents are emitted; records are stamped with its max
     *     timestamp
     */
    private void emitWindowContents(W window) throws Exception {
        // only time window touch the time concept.
        collector.setTimestamp(window.maxTimestamp());
        windowFunctionContext.setWindow(window);
        windowProcessFunction.onTrigger(collector, partitionedContext, windowFunctionContext);
    }
/**
* Retrieves the {@link MergingWindowSet} for the currently active key. The caller must ensure
* that the correct key is set in the state backend.
*
* <p>The caller must also ensure to properly persist changes to state using {@link
* MergingWindowSet#persist()}.
*/
protected MergingWindowSet<W> getMergingWindowSet() throws Exception {
MergingWindowAssigner<? super TaggedUnion<IN1, IN2>, W> mergingAssigner =
(MergingWindowAssigner<? super TaggedUnion<IN1, IN2>, W>) windowAssigner;
return new MergingWindowSet<>(mergingAssigner, mergingSetsState);
}
}
|
apache/paimon | 36,000 | paimon-core/src/test/java/org/apache/paimon/operation/LocalOrphanFilesCleanTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.paimon.operation;
import org.apache.paimon.Changelog;
import org.apache.paimon.CoreOptions;
import org.apache.paimon.CoreOptions.ExternalPathStrategy;
import org.apache.paimon.Snapshot;
import org.apache.paimon.data.BinaryString;
import org.apache.paimon.data.DataFormatTestUtil;
import org.apache.paimon.data.GenericRow;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.FileStatus;
import org.apache.paimon.fs.Path;
import org.apache.paimon.fs.local.LocalFileIO;
import org.apache.paimon.manifest.ManifestList;
import org.apache.paimon.mergetree.compact.ConcatRecordReader;
import org.apache.paimon.options.Options;
import org.apache.paimon.reader.ReaderSupplier;
import org.apache.paimon.reader.RecordReader;
import org.apache.paimon.reader.RecordReaderIterator;
import org.apache.paimon.schema.Schema;
import org.apache.paimon.schema.SchemaManager;
import org.apache.paimon.schema.SchemaUtils;
import org.apache.paimon.schema.TableSchema;
import org.apache.paimon.table.FileStoreTable;
import org.apache.paimon.table.FileStoreTableFactory;
import org.apache.paimon.table.sink.TableCommitImpl;
import org.apache.paimon.table.sink.TableWriteImpl;
import org.apache.paimon.table.source.Split;
import org.apache.paimon.table.source.StreamDataTableScan;
import org.apache.paimon.types.DataType;
import org.apache.paimon.types.DataTypes;
import org.apache.paimon.types.RowKind;
import org.apache.paimon.types.RowType;
import org.apache.paimon.utils.FileStorePathFactory;
import org.apache.paimon.utils.Preconditions;
import org.apache.paimon.utils.SnapshotManager;
import org.apache.paimon.utils.StringUtils;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.apache.paimon.utils.BranchManager.branchPath;
import static org.apache.paimon.utils.FileStorePathFactory.BUCKET_PATH_PREFIX;
import static org.assertj.core.api.Assertions.assertThat;
/** Test for {@link LocalOrphanFilesClean}. */
public class LocalOrphanFilesCleanTest {
private static final Random RANDOM = new Random(System.currentTimeMillis());
@TempDir private java.nio.file.Path tempDir;
@TempDir private java.nio.file.Path tmpExternalPath;
private Path tablePath;
private FileIO fileIO;
private RowType rowType;
private FileStoreTable table;
private TableWriteImpl<?> write;
private TableCommitImpl commit;
private Path manifestDir;
private long incrementalIdentifier;
private List<Path> manuallyAddedFiles;
    /**
     * Creates a fresh table under the temp directory with schema (pk, part1, part2, value) and
     * opens a write/commit pair for it; resets the per-test bookkeeping fields.
     */
    @BeforeEach
    public void beforeEach() throws Exception {
        tablePath = new Path(tempDir.toString());
        fileIO = LocalFileIO.create();
        rowType =
                RowType.of(
                        new DataType[] {
                            DataTypes.INT(), DataTypes.INT(), DataTypes.STRING(), DataTypes.STRING()
                        },
                        new String[] {"pk", "part1", "part2", "value"});
        table = createFileStoreTable(rowType, new Options());
        String commitUser = UUID.randomUUID().toString();
        write = table.newWrite(commitUser);
        commit = table.newCommit(commitUser);
        manifestDir = new Path(tablePath, "manifest");
        // Counter used to make manually generated file names unique across a test.
        incrementalIdentifier = 0;
        manuallyAddedFiles = new ArrayList<>();
    }
    /** Closes the write/commit pair opened in {@code beforeEach} and resets TestPojo state. */
    @AfterEach
    public void afterEach() throws Exception {
        write.close();
        commit.close();
        TestPojo.reset();
    }
    /**
     * Same as {@code testNormallyRemoving}, but the table writes data files to an external path
     * (round-robin strategy), so orphan cleaning must also cover files under that path.
     */
    @Test
    public void testNormallyRemovingWithExternalPath() throws Throwable {
        // recreate the table with another option; close the writer/committer opened in beforeEach
        // before replacing them
        this.write.close();
        this.commit.close();
        Options options = new Options();
        String externalPaths = "file://" + tmpExternalPath;
        options.set(CoreOptions.DATA_FILE_EXTERNAL_PATHS, externalPaths);
        options.set(
                CoreOptions.DATA_FILE_EXTERNAL_PATHS_STRATEGY, ExternalPathStrategy.ROUND_ROBIN);
        this.table = createFileStoreTable(rowType, options);
        String commitUser = UUID.randomUUID().toString();
        write = table.newWrite(commitUser);
        commit = table.newCommit(commitUser);
        // data (and thus orphan files) now live under the external path
        normallyRemoving(new Path(tmpExternalPath.toString()));
    }
    /** Runs the orphan-file cleaning scenario against the default table path. */
    @Test
    public void testNormallyRemoving() throws Throwable {
        normallyRemoving(tablePath);
    }
/**
 * End-to-end orphan-file cleaning scenario: commit data, create random
 * tags/a branch, plant known orphan files under {@code dataPath}, expire
 * snapshots and delete tags, then verify LocalOrphanFilesClean removes
 * exactly the planted orphans without touching live data.
 *
 * @param dataPath directory where orphan data files are planted (table path
 *     or an external data path)
 * @throws Throwable the validation failure wrapped together with the random
 *     scenario (committed data, tags, expired count) for reproducibility
 */
public void normallyRemoving(Path dataPath) throws Throwable {
    int commitTimes = 30;
    List<List<TestPojo>> committedData = new ArrayList<>();
    Map<Long, List<TestPojo>> snapshotData = new HashMap<>();
    SnapshotManager snapshotManager = table.snapshotManager();
    writeData(snapshotManager, committedData, snapshotData, new HashMap<>(), commitTimes);

    // randomly create tags
    List<String> allTags = new ArrayList<>();
    int snapshotCount = (int) snapshotManager.snapshotCount();
    for (int i = 1; i <= snapshotCount; i++) {
        if (RANDOM.nextBoolean()) {
            String tagName = "tag" + i;
            table.createTag(tagName, i);
            allTags.add(tagName);
        }
    }
    // Guard against the (rare) run where the coin flip never created a tag:
    // allTags.get(0) below would otherwise throw IndexOutOfBoundsException.
    if (allTags.isEmpty()) {
        table.createTag("tag1", 1);
        allTags.add("tag1");
    }

    // create branch1 by tag
    table.createBranch("branch1", allTags.get(0));

    // generate non used files
    int shouldBeDeleted = generateUnUsedFile(dataPath);
    assertThat(manuallyAddedFiles.size()).isEqualTo(shouldBeDeleted);

    // randomly expire snapshots
    int expired = RANDOM.nextInt(snapshotCount / 2);
    expired = expired == 0 ? 1 : expired;
    Options expireOptions = new Options();
    expireOptions.set(CoreOptions.SNAPSHOT_EXPIRE_LIMIT, snapshotCount);
    expireOptions.set(CoreOptions.SNAPSHOT_NUM_RETAINED_MIN, snapshotCount - expired);
    expireOptions.set(CoreOptions.SNAPSHOT_NUM_RETAINED_MAX, snapshotCount - expired);
    table.copy(expireOptions.toMap()).newCommit("").expireSnapshots();

    // randomly delete tags (fixed: previous code assigned Collections.emptyList()
    // only to immediately overwrite it — dead store)
    List<String> deleteTags = randomlyPick(allTags);
    for (String tagName : deleteTags) {
        table.deleteTag(tagName);
    }

    // first check, nothing will be deleted because the default olderThan interval is 1 day
    LocalOrphanFilesClean orphanFilesClean = new LocalOrphanFilesClean(table);
    assertThat(orphanFilesClean.clean().getDeletedFilesPath().size()).isEqualTo(0);

    // second check: olderThan in the future, so all planted orphans qualify
    orphanFilesClean =
            new LocalOrphanFilesClean(
                    table, System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(2));
    List<Path> deleted = orphanFilesClean.clean().getDeletedFilesPath();
    try {
        validate(deleted, snapshotData, new HashMap<>());
    } catch (Throwable t) {
        // Attach the full random scenario so a failure can be reproduced/debugged.
        String tableOptions = "Table options:\n" + table.options();
        String committed = "Committed data:";
        for (int i = 0; i < committedData.size(); i++) {
            String insertValues =
                    committedData.get(i).stream()
                            .map(TestPojo::toInsertValueString)
                            .collect(Collectors.joining(","));
            committed = String.format("%s\n%d:{%s}", committed, i, insertValues);
        }
        String snapshot = "Snapshot expired: " + expired;
        String tag =
                String.format(
                        "Tags: created{%s}; deleted{%s}",
                        String.join(",", allTags), String.join(",", deleteTags));
        String addedFile =
                "Manually added file:\n"
                        + manuallyAddedFiles.stream()
                                .map(Path::toString)
                                .collect(Collectors.joining("\n"));
        throw new Exception(
                String.format(
                        "%s\n%s\n%s\n%s\n%s",
                        tableOptions, committed, snapshot, tag, addedFile),
                t);
    }
}
@Test
public void testNormallyRemovingMixedWithExternalPath() throws Throwable {
    // Mixed layout: first commits go to the warehouse path, then the table is
    // reconfigured so later commits go to an external path; orphan cleaning
    // must handle files in both locations.
    int commitTimes = 30;
    List<List<TestPojo>> committedData = new ArrayList<>();
    Map<Long, List<TestPojo>> snapshotData = new HashMap<>();
    SnapshotManager snapshotManager = table.snapshotManager();
    // 1. write data to the warehouse path
    writeData(snapshotManager, committedData, snapshotData, new HashMap<>(), commitTimes);
    // 2. write data to the external path
    this.write.close();
    this.commit.close();
    String externalPaths = "file://" + tmpExternalPath;
    table.options().put(CoreOptions.DATA_FILE_EXTERNAL_PATHS.key(), externalPaths);
    table.options().put(CoreOptions.DATA_FILE_EXTERNAL_PATHS_STRATEGY.key(), "round-robin");
    table = table.copy(table.options());
    String commitUser = UUID.randomUUID().toString();
    write = table.newWrite(commitUser);
    commit = table.newCommit(commitUser);
    snapshotManager = table.snapshotManager();
    writeData(snapshotManager, committedData, snapshotData, new HashMap<>(), commitTimes);
    // randomly create tags
    List<String> allTags = new ArrayList<>();
    int snapshotCount = (int) snapshotManager.snapshotCount();
    for (int i = 1; i <= snapshotCount; i++) {
        if (RANDOM.nextBoolean()) {
            String tagName = "tag" + i;
            table.createTag(tagName, i);
            allTags.add(tagName);
        }
    }
    // create branch1 by tag
    // NOTE(review): assumes at least one tag was created above; allTags.get(0)
    // throws on the rare all-false run of RANDOM.nextBoolean() — consider a guard.
    table.createBranch("branch1", allTags.get(0));
    // generate non used files in BOTH locations
    int shouldBeDeleted = generateUnUsedFile(tablePath);
    shouldBeDeleted += generateUnUsedFile(new Path(tmpExternalPath.toString()));
    assertThat(manuallyAddedFiles.size()).isEqualTo(shouldBeDeleted);
    // randomly expire snapshots
    int expired = RANDOM.nextInt(snapshotCount / 2);
    expired = expired == 0 ? 1 : expired;
    Options expireOptions = new Options();
    expireOptions.set(CoreOptions.SNAPSHOT_EXPIRE_LIMIT, snapshotCount);
    expireOptions.set(CoreOptions.SNAPSHOT_NUM_RETAINED_MIN, snapshotCount - expired);
    expireOptions.set(CoreOptions.SNAPSHOT_NUM_RETAINED_MAX, snapshotCount - expired);
    table.copy(expireOptions.toMap()).newCommit("").expireSnapshots();
    // randomly delete tags
    // NOTE(review): the emptyList() assignment below is a dead store, immediately
    // overwritten by randomlyPick(allTags).
    List<String> deleteTags = Collections.emptyList();
    deleteTags = randomlyPick(allTags);
    for (String tagName : deleteTags) {
        table.deleteTag(tagName);
    }
    // first check, nothing will be deleted because the default olderThan interval is 1 day
    LocalOrphanFilesClean orphanFilesClean = new LocalOrphanFilesClean(table);
    assertThat(orphanFilesClean.clean().getDeletedFilesPath().size()).isEqualTo(0);
    // second check: olderThan in the future, all planted orphans qualify
    orphanFilesClean =
            new LocalOrphanFilesClean(
                    table, System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(2));
    List<Path> deleted = orphanFilesClean.clean().getDeletedFilesPath();
    try {
        validate(deleted, snapshotData, new HashMap<>());
    } catch (Throwable t) {
        // Attach the full random scenario so a failure can be reproduced/debugged.
        String tableOptions = "Table options:\n" + table.options();
        String committed = "Committed data:";
        for (int i = 0; i < committedData.size(); i++) {
            String insertValues =
                    committedData.get(i).stream()
                            .map(TestPojo::toInsertValueString)
                            .collect(Collectors.joining(","));
            committed = String.format("%s\n%d:{%s}", committed, i, insertValues);
        }
        String snapshot = "Snapshot expired: " + expired;
        String tag =
                String.format(
                        "Tags: created{%s}; deleted{%s}",
                        String.join(",", allTags), String.join(",", deleteTags));
        String addedFile =
                "Manually added file:\n"
                        + manuallyAddedFiles.stream()
                                .map(Path::toString)
                                .collect(Collectors.joining("\n"));
        throw new Exception(
                String.format(
                        "%s\n%s\n%s\n%s\n%s",
                        tableOptions, committed, snapshot, tag, addedFile),
                t);
    }
}
/**
 * Verifies that exactly the manually planted orphan files were deleted, and
 * that every remaining snapshot (including tagged ones) still reads back the
 * expected data. When the table uses changelog-producer=input, changelogs are
 * validated as well.
 *
 * @param deleteFiles paths reported deleted by LocalOrphanFilesClean
 * @param snapshotData expected full data keyed by snapshot id
 * @param changelogData expected changelog rows keyed by snapshot id
 */
private void validate(
        List<Path> deleteFiles,
        Map<Long, List<TestPojo>> snapshotData,
        Map<Long, List<InternalRow>> changelogData)
        throws Exception {
    // Compare by URI path so scheme differences (e.g. file://) don't matter.
    assertThat(deleteFiles.stream().map(p -> p.toUri().getPath()))
            .containsExactlyInAnyOrderElementsOf(
                    manuallyAddedFiles.stream()
                            .map(p -> p.toUri().getPath())
                            .collect(Collectors.toList()));
    // Collect live snapshots plus tagged snapshots (a Set de-duplicates overlap).
    Set<Snapshot> snapshots = new HashSet<>();
    table.snapshotManager().snapshots().forEachRemaining(snapshots::add);
    snapshots.addAll(table.tagManager().taggedSnapshots());
    List<Snapshot> sorted =
            snapshots.stream()
                    .sorted(Comparator.comparingLong(Snapshot::id))
                    .collect(Collectors.toList());
    for (Snapshot snapshot : sorted) {
        try {
            validateSnapshot(snapshot, snapshotData.get(snapshot.id()));
        } catch (Exception e) {
            // Wrap with the snapshot id so the failing snapshot is identifiable.
            throw new Exception("Failed to validate snapshot " + snapshot.id(), e);
        }
    }
    // validate changelog
    if (table.coreOptions().changelogProducer() == CoreOptions.ChangelogProducer.INPUT) {
        List<Changelog> changelogs = new ArrayList<>();
        table.changelogManager().changelogs().forEachRemaining(changelogs::add);
        validateChangelog(
                changelogs.stream()
                        .sorted(Comparator.comparingLong(Changelog::id))
                        .collect(Collectors.toList()),
                changelogData);
    }
}
/**
 * Replays the table as a stream starting from the oldest retained changelog
 * and checks each consumed batch against the expected changelog rows.
 *
 * @param changelogs retained changelogs, sorted ascending by id; must be non-empty
 * @param changelogData expected changelog rows keyed by snapshot id
 */
private void validateChangelog(
        List<Changelog> changelogs, Map<Long, List<InternalRow>> changelogData)
        throws Exception {
    Preconditions.checkArgument(!changelogs.isEmpty(), "The changelogs should not be empty!");
    // Start streaming from the oldest retained changelog id.
    FileStoreTable scanTable =
            table.copy(
                    Collections.singletonMap(
                            CoreOptions.SCAN_SNAPSHOT_ID.key(),
                            String.valueOf(changelogs.get(0).id())));
    Long max =
            changelogData.keySet().stream()
                    .max(Comparator.comparingLong(Long::longValue))
                    .get();
    StreamDataTableScan scan = scanTable.newStreamScan();
    TreeMap<Long, List<InternalRow>> data = new TreeMap<>(changelogData);
    // clear the data < the smallest changelog data.
    data.headMap(changelogs.get(0).id()).clear();
    // initial plan
    scan.plan();
    Long id = changelogs.get(0).id();
    while (id <= max) {
        List<Split> splits = scan.plan().splits();
        if (!splits.isEmpty()) {
            List<ReaderSupplier<InternalRow>> readers = new ArrayList<>();
            for (Split split : splits) {
                readers.add(() -> scanTable.newRead().createReader(split));
            }
            RecordReader<InternalRow> recordReader = ConcatRecordReader.create(readers);
            RecordReaderIterator<InternalRow> iterator =
                    new RecordReaderIterator<>(recordReader);
            List<String> result = new ArrayList<>();
            while (iterator.hasNext()) {
                InternalRow rowData = iterator.next();
                result.add(DataFormatTestUtil.internalRowToString(rowData, rowType));
            }
            iterator.close();
            // checkpoint() advances the position; the batch just read belongs
            // to snapshot id - 1.
            id = scan.checkpoint();
            List<InternalRow> batch = data.remove(id - 1);
            // Compare as sorted strings since read order is not guaranteed.
            assertThat(result.stream().sorted().collect(Collectors.joining("\n")))
                    .isEqualTo(
                            batch.stream()
                                    .map(
                                            d ->
                                                    DataFormatTestUtil.internalRowToString(
                                                            d, rowType))
                                    .sorted()
                                    .collect(Collectors.joining("\n")));
        } else {
            id = scan.checkpoint();
        }
    }
    // Every expected batch must be consumed (only empty leftovers allowed).
    Assertions.assertThat(data.values().stream().allMatch(List::isEmpty)).isTrue();
}
/**
 * Reads every split of the given snapshot and asserts the row contents
 * (ignoring RowKind) match the expected data, in any order.
 */
private void validateSnapshot(Snapshot snapshot, List<TestPojo> data) throws Exception {
    List<ReaderSupplier<InternalRow>> suppliers = new ArrayList<>();
    for (Split split : table.newSnapshotReader().withSnapshot(snapshot).read().splits()) {
        suppliers.add(() -> table.newRead().createReader(split));
    }
    RecordReaderIterator<InternalRow> rows =
            new RecordReaderIterator<>(ConcatRecordReader.create(suppliers));
    List<String> actual = new ArrayList<>();
    while (rows.hasNext()) {
        actual.add(DataFormatTestUtil.toStringNoRowKind(rows.next(), rowType));
    }
    rows.close();
    assertThat(actual).containsExactlyInAnyOrderElementsOf(TestPojo.formatData(data));
}
@ValueSource(strings = {"none", "input"})
@ParameterizedTest(name = "changelog-producer = {0}")
public void testCleanOrphanFilesWithChangelogDecoupled(String changelogProducer)
        throws Exception {
    // Retain fewer snapshots (15) than changelogs (20) so changelog lifetime is
    // decoupled from snapshot lifetime; run for both changelog producers.
    Options options = new Options();
    options.set(CoreOptions.SNAPSHOT_NUM_RETAINED_MAX, 15);
    options.set(CoreOptions.CHANGELOG_NUM_RETAINED_MAX, 20);
    options.set(CoreOptions.CHANGELOG_PRODUCER.key(), changelogProducer);
    cleanOrphanFilesWithChangelogDecoupled(tablePath, options);
}
@ValueSource(strings = {"none", "input"})
@ParameterizedTest(name = "changelog-producer = {0}")
public void testCleanOrphanFilesWithChangelogDecoupledWithExternalPath(String changelogProducer)
        throws Exception {
    // Same as testCleanOrphanFilesWithChangelogDecoupled, but data files are
    // written to an external path (round-robin strategy).
    Options options = new Options();
    options.set(CoreOptions.SNAPSHOT_NUM_RETAINED_MAX, 15);
    options.set(CoreOptions.CHANGELOG_NUM_RETAINED_MAX, 20);
    options.set(CoreOptions.CHANGELOG_PRODUCER.key(), changelogProducer);
    String externalPaths = "file://" + tmpExternalPath;
    options.set(CoreOptions.DATA_FILE_EXTERNAL_PATHS, externalPaths);
    options.set(
            CoreOptions.DATA_FILE_EXTERNAL_PATHS_STRATEGY, ExternalPathStrategy.ROUND_ROBIN);
    cleanOrphanFilesWithChangelogDecoupled(new Path(tmpExternalPath.toString()), options);
}
/**
 * Recreates the table with the given options, writes data (also tracking
 * changelog rows), plants orphan files under {@code dataPath}, then runs
 * LocalOrphanFilesClean and validates both snapshots and changelogs.
 */
public void cleanOrphanFilesWithChangelogDecoupled(Path dataPath, Options options)
        throws Exception {
    // recreate the table with another option
    this.write.close();
    this.commit.close();
    int commitTimes = 30;
    FileStoreTable table = createFileStoreTable(rowType, options);
    String commitUser = UUID.randomUUID().toString();
    this.table = table;
    write = table.newWrite(commitUser);
    commit = table.newCommit(commitUser);
    List<List<TestPojo>> committedData = new ArrayList<>();
    Map<Long, List<TestPojo>> snapshotData = new HashMap<>();
    Map<Long, List<InternalRow>> changelogData = new HashMap<>();
    SnapshotManager snapshotManager = table.snapshotManager();
    writeData(snapshotManager, committedData, snapshotData, changelogData, commitTimes);
    // create empty branch with same schema
    table.createBranch("branch1");
    // generate non used files
    int shouldBeDeleted = generateUnUsedFile(dataPath);
    assertThat(manuallyAddedFiles.size()).isEqualTo(shouldBeDeleted);
    // first check, nothing will be deleted because the default olderThan interval is 1 day
    LocalOrphanFilesClean orphanFilesClean = new LocalOrphanFilesClean(table);
    assertThat(orphanFilesClean.clean().getDeletedFilesPath().size()).isEqualTo(0);
    // second check: olderThan in the future, all planted orphans qualify
    orphanFilesClean =
            new LocalOrphanFilesClean(
                    table, System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(2));
    List<Path> deleted = orphanFilesClean.clean().getDeletedFilesPath();
    validate(deleted, snapshotData, changelogData);
}
/** Manually make a FileNotFoundException to simulate snapshot expire while clean. */
@Test
public void testAbnormallyRemoving() throws Exception {
    // generate randomly number of snapshots (1..5)
    int num = RANDOM.nextInt(5) + 1;
    for (int i = 0; i < num; i++) {
        commit(generateData());
    }
    // randomly delete a manifest file of snapshot 1, so reading snapshot 1's
    // manifests during cleaning will hit a missing file
    SnapshotManager snapshotManager = table.snapshotManager();
    Snapshot snapshot1 = snapshotManager.snapshot(1);
    List<Path> manifests = new ArrayList<>();
    ManifestList manifestList = table.store().manifestListFactory().create();
    FileStorePathFactory pathFactory = table.store().pathFactory();
    manifestList
            .readAllManifests(snapshot1)
            .forEach(m -> manifests.add(pathFactory.toManifestFilePath(m.fileName())));
    Path manifest = manifests.get(RANDOM.nextInt(manifests.size()));
    fileIO.deleteQuietly(manifest);
    // The clean must still make progress and delete something despite the
    // missing manifest.
    LocalOrphanFilesClean orphanFilesClean =
            new LocalOrphanFilesClean(
                    table, System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(2));
    assertThat(orphanFilesClean.clean().getDeletedFilesPath().size()).isGreaterThan(0);
}
@Test
public void testRemovingEmptyDirectories() throws Exception {
    // Empty partition/bucket directories should be removed by the clean even
    // though they are not files.
    List<List<TestPojo>> committedData = new ArrayList<>();
    Map<Long, List<TestPojo>> snapshotData = new HashMap<>();
    SnapshotManager snapshotManager = table.snapshotManager();
    writeData(snapshotManager, committedData, snapshotData, new HashMap<>(), 1);
    Path emptyDirectory1 = new Path(tablePath.toString(), "part1=1/part2=2/bucket-0");
    Path emptyDirectory2 = new Path(tablePath.toString(), "part1=1/part2=2/bucket-1");
    fileIO.mkdirs(emptyDirectory1);
    fileIO.mkdirs(emptyDirectory2);
    assertThat(fileIO.exists(emptyDirectory1)).isTrue();
    assertThat(fileIO.exists(emptyDirectory2)).isTrue();
    // Note: default olderThan is used here; empty directories are still removed.
    LocalOrphanFilesClean orphanFilesClean = new LocalOrphanFilesClean(table);
    List<Path> deleted = orphanFilesClean.clean().getDeletedFilesPath();
    assertThat(fileIO.exists(emptyDirectory1)).isFalse();
    assertThat(fileIO.exists(emptyDirectory2)).isFalse();
    validate(deleted, snapshotData, new HashMap<>());
}
/**
 * Writes an initial batch plus {@code commitTimes} random batches (each either
 * an update of previously written rows or fresh inserts), recording the
 * expected full-table contents per snapshot and the expected changelog rows.
 *
 * @param snapshotManager snapshot manager of the table being written
 * @param committedData out: each committed batch, in commit order
 * @param snapshotData out: expected full data keyed by snapshot id
 * @param changelogData out: expected changelog rows keyed by snapshot id
 * @param commitTimes number of additional random commits after the first
 */
private void writeData(
        SnapshotManager snapshotManager,
        List<List<TestPojo>> committedData,
        Map<Long, List<TestPojo>> snapshotData,
        Map<Long, List<InternalRow>> changelogData,
        int commitTimes)
        throws Exception {
    // first snapshot
    Long latestSnapshotId = snapshotManager.latestSnapshotId();
    List<TestPojo> data = generateData();
    commit(data);
    committedData.add(data);
    List<TestPojo> current = new ArrayList<>();
    // If the table already has snapshots (e.g. second writeData call), start
    // from that snapshot's expected contents.
    if (latestSnapshotId != null) {
        current.addAll(snapshotData.get(latestSnapshotId));
    }
    current.addAll(data);
    recordSnapshotData(current, snapshotData, snapshotManager);
    recordChangelogData(new ArrayList<>(), current, changelogData, snapshotManager);
    // randomly generate data
    for (int i = 1; i <= commitTimes; i++) {
        List<TestPojo> previous =
                new ArrayList<>(snapshotData.get(snapshotManager.latestSnapshotId()));
        // randomly update
        if (RANDOM.nextBoolean()) {
            // Update a random subset of existing rows (UPDATE_BEFORE/AFTER pairs).
            List<TestPojo> toBeUpdated = randomlyPick(previous);
            List<TestPojo> updateAfter = commitUpdate(toBeUpdated);
            committedData.add(updateAfter);
            previous.removeAll(toBeUpdated);
            previous.addAll(updateAfter);
            recordSnapshotData(previous, snapshotData, snapshotManager);
            recordChangelogData(toBeUpdated, updateAfter, changelogData, snapshotManager);
        } else {
            // Append fresh rows.
            current = generateData();
            commit(current);
            committedData.add(current);
            recordChangelogData(new ArrayList<>(), current, changelogData, snapshotManager);
            current.addAll(previous);
            recordSnapshotData(current, snapshotData, snapshotManager);
        }
    }
}
/**
 * Plants orphan (unreferenced) files in the snapshot, changelog, data, manifest
 * and branch-snapshot directories, and returns how many should be deleted by
 * the orphan-file clean. All planted paths are tracked in manuallyAddedFiles.
 *
 * @param dataPath directory under which orphan data files are planted
 * @return expected number of files to be deleted
 */
private int generateUnUsedFile(Path dataPath) throws Exception {
    int shouldBeDeleted = 0;
    // 1..9 files per directory category.
    int fileNum = RANDOM.nextInt(10);
    fileNum = fileNum == 0 ? 1 : fileNum;
    // snapshot
    addNonUsedFiles(
            new Path(tablePath, "snapshot"), fileNum, Collections.singletonList("UNKNOWN"));
    shouldBeDeleted += fileNum;
    // changelog
    addNonUsedFiles(
            new Path(tablePath, "changelog"), fileNum, Collections.singletonList("UNKNOWN"));
    shouldBeDeleted += fileNum;
    // data files
    shouldBeDeleted += randomlyAddNonUsedDataFiles(dataPath);
    // manifests (cover all manifest file-name prefixes plus an unknown one)
    addNonUsedFiles(
            manifestDir,
            fileNum,
            Arrays.asList("manifest-list-", "manifest-", "index-manifest-", "UNKNOWN-"));
    shouldBeDeleted += fileNum;
    // branch snapshot
    addNonUsedFiles(
            new Path(branchPath(tablePath, "branch1") + "/snapshot"),
            fileNum,
            Collections.singletonList("UNKNOWN"));
    shouldBeDeleted += fileNum;
    return shouldBeDeleted;
}
/** Generates between 5 and 10 fresh rows with monotonically increasing keys. */
private List<TestPojo> generateData() {
    final int count = RANDOM.nextInt(6) + 5;
    List<TestPojo> rows = new ArrayList<>(count);
    int remaining = count;
    while (remaining-- > 0) {
        rows.add(TestPojo.next());
    }
    return rows;
}
/** Writes all rows as INSERTs and commits them under the next identifier. */
private void commit(List<TestPojo> rows) throws Exception {
    for (TestPojo row : rows) {
        write.write(row.toRow(RowKind.INSERT));
    }
    long id = incrementalIdentifier;
    commit.commit(id, write.prepareCommit(true, id));
    incrementalIdentifier++;
}
/**
 * Writes an UPDATE_BEFORE/UPDATE_AFTER pair for each given row (the "after"
 * row has a fresh random value), commits, and returns the "after" rows.
 */
private List<TestPojo> commitUpdate(List<TestPojo> updates) throws Exception {
    List<TestPojo> updated = new ArrayList<>(updates.size());
    for (TestPojo before : updates) {
        write.write(before.toRow(RowKind.UPDATE_BEFORE));
        TestPojo afterRow = before.copyWithNewValue();
        updated.add(afterRow);
        write.write(afterRow.toRow(RowKind.UPDATE_AFTER));
    }
    long id = incrementalIdentifier;
    commit.commit(id, write.prepareCommit(true, id));
    incrementalIdentifier++;
    return updated;
}
/**
 * Records {@code data} as the expected contents of the latest snapshot.
 * When the latest snapshot is a COMPACT snapshot, the same data is also
 * recorded for the immediately preceding snapshot id (presumably the data
 * snapshot produced by the same commit — confirmed by usage in writeData).
 */
private void recordSnapshotData(
        List<TestPojo> data,
        Map<Long, List<TestPojo>> snapshotData,
        SnapshotManager snapshotManager) {
    Snapshot latest = snapshotManager.latestSnapshot();
    long latestId = latest.id();
    if (Snapshot.CommitKind.COMPACT == latest.commitKind()) {
        snapshotData.put(latestId - 1, data);
    }
    snapshotData.put(latestId, data);
}
/**
 * Records the expected changelog rows for the latest snapshot. Only meaningful
 * rows are recorded when changelog-producer=input; otherwise an empty list is
 * stored. An empty {@code updateBefore} marks the batch as pure inserts.
 */
private void recordChangelogData(
        List<TestPojo> updateBefore,
        List<TestPojo> updateAfter,
        Map<Long, List<InternalRow>> changelogData,
        SnapshotManager snapshotManager) {
    Snapshot latest = snapshotManager.latestSnapshot();
    boolean isInsert = updateBefore.isEmpty();
    if (table.coreOptions().changelogProducer() == CoreOptions.ChangelogProducer.INPUT) {
        List<InternalRow> data = new ArrayList<>();
        for (TestPojo testPojo : updateBefore) {
            data.add(testPojo.toRow(RowKind.UPDATE_BEFORE));
        }
        for (TestPojo testPojo : updateAfter) {
            data.add(testPojo.toRow(isInsert ? RowKind.INSERT : RowKind.UPDATE_AFTER));
        }
        if (latest.commitKind() != Snapshot.CommitKind.COMPACT) {
            changelogData.put(latest.id(), data);
        } else {
            // A COMPACT latest snapshot: the changelog rows belong to the
            // preceding (data) snapshot; the compact snapshot itself carries none.
            changelogData.put(latest.id() - 1, data);
            changelogData.put(latest.id(), new ArrayList<>());
        }
    } else {
        changelogData.put(latest.id(), new ArrayList<>());
    }
}
/**
 * Walks the partition layout under {@code dataPath} (part1=X/part2=Y/bucket-N),
 * picks a random subset of bucket directories, and plants one orphan data file
 * in each. Returns the number of planted files.
 */
private int randomlyAddNonUsedDataFiles(Path dataPath) throws IOException {
    int addedFiles = 0;
    // Partition dirs contain '=' in their name; bucket dirs start with the
    // bucket path prefix.
    List<Path> part1 = listSubDirs(dataPath, p -> p.getName().contains("="));
    List<Path> part2 = new ArrayList<>();
    List<Path> buckets = new ArrayList<>();
    for (Path path : part1) {
        part2.addAll(listSubDirs(path, p -> p.getName().contains("=")));
    }
    for (Path path : part2) {
        buckets.addAll(listSubDirs(path, p -> p.getName().startsWith(BUCKET_PATH_PREFIX)));
    }
    // add files: one orphan per picked bucket, with a data/changelog/unknown prefix
    List<Path> corruptedBuckets = randomlyPick(buckets);
    for (Path path : corruptedBuckets) {
        addNonUsedFiles(
                path,
                1,
                Arrays.asList(
                        table.coreOptions().dataFilePrefix(),
                        table.coreOptions().changelogFilePrefix(),
                        "UNKNOWN-"));
    }
    addedFiles += corruptedBuckets.size();
    return addedFiles;
}
/**
 * Randomly picks a subset of distinct elements from {@code list}: at least one
 * element, at most {@code list.size() - 1} (or the single element of a
 * one-element list). The input list is not modified.
 *
 * <p>Fixed: previously an empty input crashed with IllegalArgumentException
 * from {@code RANDOM.nextInt(0)}; now an empty list yields an empty result.
 *
 * @param list candidates to pick from
 * @return a new list with the picked elements, in pick order
 */
private <T> List<T> randomlyPick(List<T> list) {
    if (list.isEmpty()) {
        return new ArrayList<>();
    }
    // nextInt(size) is in [0, size); force at least one pick.
    int num = Math.max(1, RANDOM.nextInt(list.size()));
    List<T> copy = new ArrayList<>(list);
    List<T> picked = new ArrayList<>(num);
    for (int i = 0; i < num; i++) {
        picked.add(copy.remove(RANDOM.nextInt(copy.size())));
    }
    return picked;
}
/**
 * Plants {@code fileNum} orphan entries under {@code dir}, each named with a
 * random prefix from {@code fileNamePrefix} plus a UUID. Each entry is
 * randomly either an empty file or a directory; all are tracked in
 * manuallyAddedFiles for later validation.
 */
private void addNonUsedFiles(Path dir, int fileNum, List<String> fileNamePrefix)
        throws IOException {
    for (int i = 0; i < fileNum; i++) {
        String prefix = fileNamePrefix.get(RANDOM.nextInt(fileNamePrefix.size()));
        Path orphan = new Path(dir, prefix + UUID.randomUUID());
        if (RANDOM.nextBoolean()) {
            fileIO.tryToWriteAtomic(orphan, "");
        } else {
            fileIO.mkdirs(orphan);
        }
        manuallyAddedFiles.add(orphan);
    }
}
/** Lists the direct children of {@code root} whose path satisfies {@code filter}. */
private List<Path> listSubDirs(Path root, Predicate<Path> filter) throws IOException {
    List<Path> matches = new ArrayList<>();
    for (FileStatus status : fileIO.listStatus(root)) {
        Path candidate = status.getPath();
        if (filter.test(candidate)) {
            matches.add(candidate);
        }
    }
    return matches;
}
/**
 * A single test row (pk, part1, part2, value) with helpers to convert to
 * InternalRow, to SQL-insert-style strings, and to the format used for
 * read-back comparison. Keys are generated from a static counter; call
 * {@link #reset()} between tests.
 */
private static class TestPojo {
    // Shared monotonically increasing primary-key source for next().
    private static int increaseKey = 0;

    private final int pk;
    // 0-2
    private final int part1;
    // A-C
    private final String part2;
    private final String value;

    public TestPojo(int pk, int part1, String part2, String value) {
        this.pk = pk;
        this.part1 = part1;
        this.part2 = part2;
        this.value = value;
    }

    /** Converts this row into a GenericRow with the given RowKind. */
    public InternalRow toRow(RowKind rowKind) {
        return GenericRow.ofKind(
                rowKind,
                pk,
                part1,
                BinaryString.fromString(part2),
                BinaryString.fromString(value));
    }

    /** Renders the row as a SQL VALUES tuple, used in failure diagnostics. */
    public String toInsertValueString() {
        return String.format("(%d, %d, '%s', '%s')", pk, part1, part2, value);
    }

    /** Same key and partitions, but a freshly randomized value. */
    public TestPojo copyWithNewValue() {
        return new TestPojo(pk, part1, part2, randomValue());
    }

    @Override
    public String toString() {
        return "TestPojo{"
                + "pk="
                + pk
                + ", part1="
                + part1
                + ", part2='"
                + part2
                + '\''
                + ", value='"
                + value
                + '\''
                + '}';
    }

    /** Resets the shared key counter; call between tests. */
    public static void reset() {
        increaseKey = 0;
    }

    /** Generates the next row: sequential pk, random partitions and value. */
    public static TestPojo next() {
        int pk = increaseKey++;
        int part1 = RANDOM.nextInt(3);
        char c = (char) (RANDOM.nextInt(3) + 'A');
        String part2 = String.valueOf(c);
        String value = randomValue();
        return new TestPojo(pk, part1, part2, value);
    }

    /** Formats rows the same way validateSnapshot formats read-back rows. */
    public static List<String> formatData(List<TestPojo> data) {
        return data.stream().map(TestPojo::format).collect(Collectors.toList());
    }

    private String format() {
        return String.format("%d, %d, %s, %s", pk, part1, part2, value);
    }
}
// Random lowercase value string; length bounds 5/20 per
// StringUtils.getRandomString (exact inclusivity per that utility's contract).
private static String randomValue() {
    return StringUtils.getRandomString(RANDOM, 5, 20, 'a', 'z');
}
/**
 * Creates (or force-updates) the table schema at tablePath and returns the
 * table. Partition keys are (part1, part2); primary key is (pk, part1, part2);
 * bucket count is randomized 1..3.
 */
private FileStoreTable createFileStoreTable(RowType rowType, Options conf) throws Exception {
    conf.set(CoreOptions.PATH, tablePath.toString());
    conf.set(CoreOptions.BUCKET, RANDOM.nextInt(3) + 1);
    TableSchema tableSchema =
            SchemaUtils.forceCommit(
                    new SchemaManager(fileIO, tablePath),
                    new Schema(
                            rowType.getFields(),
                            Arrays.asList("part1", "part2"),
                            Arrays.asList("pk", "part1", "part2"),
                            conf.toMap(),
                            ""));
    return FileStoreTableFactory.create(fileIO, tablePath, tableSchema);
}
}
|
apache/tomcat80 | 35,872 | test/org/apache/tomcat/websocket/TestWsWebSocketContainer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.websocket;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import javax.servlet.ServletContextEvent;
import javax.websocket.ClientEndpointConfig;
import javax.websocket.ContainerProvider;
import javax.websocket.DeploymentException;
import javax.websocket.Endpoint;
import javax.websocket.EndpointConfig;
import javax.websocket.Extension;
import javax.websocket.MessageHandler;
import javax.websocket.OnMessage;
import javax.websocket.Session;
import javax.websocket.WebSocketContainer;
import javax.websocket.server.ServerContainer;
import javax.websocket.server.ServerEndpoint;
import javax.websocket.server.ServerEndpointConfig;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.apache.catalina.Context;
import org.apache.catalina.servlets.DefaultServlet;
import org.apache.catalina.startup.Tomcat;
import org.apache.coyote.http11.Http11Protocol;
import org.apache.tomcat.util.net.TesterSupport;
import org.apache.tomcat.websocket.TesterMessageCountClient.BasicBinary;
import org.apache.tomcat.websocket.TesterMessageCountClient.BasicHandler;
import org.apache.tomcat.websocket.TesterMessageCountClient.BasicText;
import org.apache.tomcat.websocket.TesterMessageCountClient.TesterEndpoint;
import org.apache.tomcat.websocket.TesterMessageCountClient.TesterProgrammaticEndpoint;
import org.apache.tomcat.websocket.server.Constants;
import org.apache.tomcat.websocket.server.WsContextListener;
public class TestWsWebSocketContainer extends WebSocketBaseTest {
private static final String MESSAGE_EMPTY = "";
private static final String MESSAGE_STRING_1 = "qwerty";
// 4K text payload built in the static initializer below.
private static final String MESSAGE_TEXT_4K;
private static final byte[] MESSAGE_BINARY_4K = new byte[4096];

// Expected send timeout used by the write-timeout tests.
private static final long TIMEOUT_MS = 5 * 1000;
// Allowed slack when asserting measured timeout durations.
private static final long MARGIN = 500;
// 5s should be plenty but Gump can be a lot slower
private static final long START_STOP_WAIT = 60 * 1000;

static {
    // Fill MESSAGE_TEXT_4K with 4096 '*' characters.
    StringBuilder sb = new StringBuilder(4096);
    for (int i = 0; i < 4096; i++) {
        sb.append('*');
    }
    MESSAGE_TEXT_4K = sb.toString();
}
@Test
public void testConnectToServerEndpoint() throws Exception {
    // Basic round trip: connect to the async echo endpoint, send one text
    // message and expect it echoed back.
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");

    tomcat.start();

    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    // Set this artificially small to trigger
    // https://bz.apache.org/bugzilla/show_bug.cgi?id=57054
    wsContainer.setDefaultMaxBinaryMessageBufferSize(64);
    Session wsSession = wsContainer.connectToServer(
            TesterProgrammaticEndpoint.class,
            ClientEndpointConfig.Builder.create().build(),
            new URI("ws://" + getHostName() + ":" + getPort() +
                    TesterEchoServer.Config.PATH_ASYNC));
    CountDownLatch latch = new CountDownLatch(1);
    BasicText handler = new BasicText(latch);
    wsSession.addMessageHandler(handler);
    wsSession.getBasicRemote().sendText(MESSAGE_STRING_1);

    boolean latchResult = handler.getLatch().await(10, TimeUnit.SECONDS);

    Assert.assertTrue(latchResult);

    Queue<String> messages = handler.getMessages();
    Assert.assertEquals(1, messages.size());
    Assert.assertEquals(MESSAGE_STRING_1, messages.peek());

    // Explicitly shut the client container down (stops background threads).
    ((WsWebSocketContainer) wsContainer).destroy();
}
@Test(expected=javax.websocket.DeploymentException.class)
public void testConnectToServerEndpointInvalidScheme() throws Exception {
    // A non-ws/wss URI scheme (ftp) must be rejected with DeploymentException.
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());

    tomcat.start();

    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    wsContainer.connectToServer(TesterProgrammaticEndpoint.class,
            ClientEndpointConfig.Builder.create().build(),
            new URI("ftp://" + getHostName() + ":" + getPort() +
                    TesterEchoServer.Config.PATH_ASYNC));
}
@Test(expected=javax.websocket.DeploymentException.class)
public void testConnectToServerEndpointNoHost() throws Exception {
    // A ws URI without a host must be rejected with DeploymentException.
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());

    tomcat.start();

    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    wsContainer.connectToServer(TesterProgrammaticEndpoint.class,
            ClientEndpointConfig.Builder.create().build(),
            new URI("ws://" + TesterEchoServer.Config.PATH_ASYNC));
}
/*
 * The eight tests below drive doBufferTest(isTextBuffer, isServerBuffer,
 * isTextMessage, pass): which buffer is shrunk to 1024 (text vs binary),
 * where it is shrunk (server vs client), which 4K message type is sent, and
 * whether the message is expected to get through ("pass") or to close the
 * session because it exceeds the matching buffer.
 */
@Test
public void testSmallTextBufferClientTextMessage() throws Exception {
    // Text buffer shrunk on client; 4K text exceeds it -> session closes.
    doBufferTest(true, false, true, false);
}


@Test
public void testSmallTextBufferClientBinaryMessage() throws Exception {
    // Text buffer shrunk on client; binary message unaffected -> passes.
    doBufferTest(true, false, false, true);
}


@Test
public void testSmallTextBufferServerTextMessage() throws Exception {
    // Text buffer shrunk on server; 4K text exceeds it -> session closes.
    doBufferTest(true, true, true, false);
}


@Test
public void testSmallTextBufferServerBinaryMessage() throws Exception {
    // Text buffer shrunk on server; binary message unaffected -> passes.
    doBufferTest(true, true, false, true);
}


@Test
public void testSmallBinaryBufferClientTextMessage() throws Exception {
    // Binary buffer shrunk on client; text message unaffected -> passes.
    doBufferTest(false, false, true, true);
}


@Test
public void testSmallBinaryBufferClientBinaryMessage() throws Exception {
    // Binary buffer shrunk on client; 4K binary exceeds it -> session closes.
    doBufferTest(false, false, false, false);
}


@Test
public void testSmallBinaryBufferServerTextMessage() throws Exception {
    // Binary buffer shrunk on server; text message unaffected -> passes.
    doBufferTest(false, true, true, true);
}


@Test
public void testSmallBinaryBufferServerBinaryMessage() throws Exception {
    // Binary buffer shrunk on server; 4K binary exceeds it -> session closes.
    doBufferTest(false, true, false, false);
}
/**
 * Shrinks either the text or binary message buffer to 1024 bytes on either the
 * server or the client, sends a 4K message of the given type to the echo
 * endpoint, and asserts the expected outcome.
 *
 * @param isTextBuffer shrink the text buffer (true) or the binary buffer
 * @param isServerBuffer shrink it on the server (true) or the client
 * @param isTextMessage send a 4K text message (true) or a 4K binary message
 * @param pass whether the message is expected to be echoed back; if false,
 *     the session is expected to be closed instead
 */
private void doBufferTest(boolean isTextBuffer, boolean isServerBuffer,
        boolean isTextMessage, boolean pass) throws Exception {

    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");

    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();

    // Configure the 1024-byte buffer on the chosen side/type. Server-side
    // limits are set via servlet context init parameters.
    if (isServerBuffer) {
        if (isTextBuffer) {
            ctx.addParameter(
                    org.apache.tomcat.websocket.server.Constants.
                            TEXT_BUFFER_SIZE_SERVLET_CONTEXT_INIT_PARAM,
                    "1024");
        } else {
            ctx.addParameter(
                    org.apache.tomcat.websocket.server.Constants.
                            BINARY_BUFFER_SIZE_SERVLET_CONTEXT_INIT_PARAM,
                    "1024");
        }
    } else {
        if (isTextBuffer) {
            wsContainer.setDefaultMaxTextMessageBufferSize(1024);
        } else {
            wsContainer.setDefaultMaxBinaryMessageBufferSize(1024);
        }
    }

    tomcat.start();

    Session wsSession = wsContainer.connectToServer(
            TesterProgrammaticEndpoint.class,
            ClientEndpointConfig.Builder.create().build(),
            new URI("ws://" + getHostName() + ":" + getPort() +
                    TesterEchoServer.Config.PATH_BASIC));
    BasicHandler<?> handler;
    CountDownLatch latch = new CountDownLatch(1);
    TesterEndpoint tep =
            (TesterEndpoint) wsSession.getUserProperties().get("endpoint");
    tep.setLatch(latch);
    if (isTextMessage) {
        handler = new BasicText(latch);
    } else {
        handler = new BasicBinary(latch);
    }

    wsSession.addMessageHandler(handler);
    try {
        if (isTextMessage) {
            wsSession.getBasicRemote().sendText(MESSAGE_TEXT_4K);
        } else {
            wsSession.getBasicRemote().sendBinary(
                    ByteBuffer.wrap(MESSAGE_BINARY_4K));
        }
    } catch (IOException ioe) {
        // Some messages sends are expected to fail. Assertions further on
        // in this method will check for the correct behaviour so ignore any
        // exception here.
    }

    boolean latchResult = handler.getLatch().await(10, TimeUnit.SECONDS);

    Assert.assertTrue(latchResult);

    Queue<?> messages = handler.getMessages();
    if (pass) {
        Assert.assertEquals(1, messages.size());
        if (isTextMessage) {
            Assert.assertEquals(MESSAGE_TEXT_4K, messages.peek());
        } else {
            Assert.assertEquals(ByteBuffer.wrap(MESSAGE_BINARY_4K),
                    messages.peek());
        }
    } else {
        // When the message exceeds the buffer size, the WebSocket is
        // closed. The endpoint ensures that the latch is cleared when the
        // WebSocket closes. However, the session isn't marked as closed
        // until after the onClose() method completes so there is a small
        // window where this test could fail. Therefore, wait briefly to
        // give the session a chance to complete the close process.
        for (int i = 0; i < 500; i++) {
            if (!wsSession.isOpen()) {
                break;
            }
            Thread.sleep(10);
        }
        Assert.assertFalse(wsSession.isOpen());
    }
}
@Test
public void testWriteTimeoutClientContainer() throws Exception {
    // Async send timeout configured on the client container.
    doTestWriteTimeoutClient(true);
}


@Test
public void testWriteTimeoutClientEndpoint() throws Exception {
    // Async send timeout configured on the individual session's async remote.
    doTestWriteTimeoutClient(false);
}
/**
 * Verifies the client-side async send timeout: the server endpoint blocks
 * (BlockingConfig), the client keeps sending until the network buffers fill,
 * and the blocked send must fail within roughly TIMEOUT_MS.
 *
 * @param setTimeoutOnContainer set the timeout on the WebSocketContainer
 *     (true) or on the session's async remote (false)
 */
private void doTestWriteTimeoutClient(boolean setTimeoutOnContainer)
        throws Exception {

    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(BlockingConfig.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");

    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();

    // Set the async timeout
    if (setTimeoutOnContainer) {
        wsContainer.setAsyncSendTimeout(TIMEOUT_MS);
    }

    tomcat.start();

    Session wsSession = wsContainer.connectToServer(
            TesterProgrammaticEndpoint.class,
            ClientEndpointConfig.Builder.create().build(),
            new URI("ws://" + getHostName() + ":" + getPort() + BlockingConfig.PATH));

    if (!setTimeoutOnContainer) {
        wsSession.getAsyncRemote().setSendTimeout(TIMEOUT_MS);
    }

    long lastSend = 0;

    // Should send quickly until the network buffers fill up and then block
    // until the timeout kicks in
    Exception exception = null;
    try {
        while (true) {
            lastSend = System.currentTimeMillis();
            Future<Void> f = wsSession.getAsyncRemote().sendBinary(
                    ByteBuffer.wrap(MESSAGE_BINARY_4K));
            f.get();
        }
    } catch (Exception e) {
        exception = e;
    }

    // Time from the start of the send that eventually failed.
    long timeout = System.currentTimeMillis() - lastSend;

    // Clear the server side block and prevent further blocks to allow the
    // server to shutdown cleanly
    BlockingPojo.clearBlock();

    // Close the client session, primarily to allow the
    // BackgroundProcessManager to shut down.
    wsSession.close();

    String msg = "Time out was [" + timeout + "] ms";

    // Check correct time passed
    Assert.assertTrue(msg, timeout >= TIMEOUT_MS - MARGIN );

    // Check the timeout wasn't too long
    Assert.assertTrue(msg, timeout < TIMEOUT_MS * 2);

    Assert.assertNotNull(exception);
}
@Test
public void testWriteTimeoutServerContainer() throws Exception {
    // Timeout configured globally on the ServerContainer
    doTestWriteTimeoutServer(true);
}
@Test
public void testWriteTimeoutServerEndpoint() throws Exception {
    // Timeout configured per session inside the server endpoint
    doTestWriteTimeoutServer(false);
}
// Communicates the timeout-configuration mode to ConstantTxConfig /
// ConstantTxEndpoint, which are instantiated by the container and cannot
// receive constructor arguments.
private static volatile boolean timeoutOnContainer = false;
/*
 * Mirror image of the client timeout test: the server endpoint
 * (ConstantTxEndpoint) sends continuously while the client handler
 * (BlockingBinaryHandler) sleeps and never drains, so the server-side async
 * send must time out. Results are read back through ConstantTxEndpoint's
 * static accessors.
 */
private void doTestWriteTimeoutServer(boolean setTimeoutOnContainer)
        throws Exception {
    // This will never work for BIO
    Assume.assumeFalse(
            "Skipping test. This feature will never work for BIO connector.",
            getProtocol().equals(Http11Protocol.class.getName()));
    /*
     * Note: There are all sorts of horrible uses of statics in this test
     *       because the API uses classes and the tests really need access
     *       to the instances which simply isn't possible.
     */
    timeoutOnContainer = setTimeoutOnContainer;
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(ConstantTxConfig.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");
    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    tomcat.start();
    Session wsSession = wsContainer.connectToServer(
            TesterProgrammaticEndpoint.class,
            ClientEndpointConfig.Builder.create().build(),
            new URI("ws://" + getHostName() + ":" + getPort() +
                    ConstantTxConfig.PATH));
    wsSession.addMessageHandler(new BlockingBinaryHandler());
    // Poll for up to ~15s for the server endpoint to finish (time out)
    int loops = 0;
    while (loops < 15) {
        Thread.sleep(1000);
        if (!ConstantTxEndpoint.getRunning()) {
            break;
        }
        loops++;
    }
    // Close the client session, primarily to allow the
    // BackgroundProcessManager to shut down.
    wsSession.close();
    // Check the right exception was thrown
    Assert.assertNotNull(ConstantTxEndpoint.getException());
    Assert.assertEquals(ExecutionException.class,
            ConstantTxEndpoint.getException().getClass());
    Assert.assertNotNull(ConstantTxEndpoint.getException().getCause());
    Assert.assertEquals(SocketTimeoutException.class,
            ConstantTxEndpoint.getException().getCause().getClass());
    // Check correct time passed
    Assert.assertTrue(ConstantTxEndpoint.getTimeout() >= TIMEOUT_MS);
    // Check the timeout wasn't too long
    Assert.assertTrue(ConstantTxEndpoint.getTimeout() < TIMEOUT_MS*2);
}
/*
 * Deploys BlockingPojo when the web application starts and resets its
 * blocking state so each test run starts blocked.
 */
public static class BlockingConfig extends WsContextListener {
    public static final String PATH = "/block";
    @Override
    public void contextInitialized(ServletContextEvent sce) {
        super.contextInitialized(sce);
        ServerContainer sc =
                (ServerContainer) sce.getServletContext().getAttribute(
                        Constants.SERVER_CONTAINER_SERVLET_CONTEXT_ATTRIBUTE);
        try {
            // Reset blocking state
            BlockingPojo.resetBlock();
            sc.addEndpoint(BlockingPojo.class);
        } catch (DeploymentException e) {
            throw new IllegalStateException(e);
        }
    }
}
/*
 * Server endpoint whose message handlers park on a shared monitor while
 * {@code block} is true, so the client's sends eventually back up. State is
 * static because the container creates the endpoint instances.
 */
@ServerEndpoint("/block")
public static class BlockingPojo {
    // Guards {@code block}; handlers wait() on it, clearBlock() notifies
    private static Object monitor = new Object();
    // Enable blocking by default
    private static boolean block = true;
    /**
     * Clear any current block.
     */
    public static void clearBlock() {
        synchronized (monitor) {
            BlockingPojo.block = false;
            monitor.notifyAll();
        }
    }
    // Re-arm the block for the next test run
    public static void resetBlock() {
        synchronized (monitor) {
            block = true;
        }
    }
    @SuppressWarnings("unused")
    @OnMessage
    public void echoTextMessage(Session session, String msg, boolean last) {
        try {
            // Hold the message until clearBlock() releases the monitor
            synchronized (monitor) {
                while (block) {
                    monitor.wait();
                }
            }
        } catch (InterruptedException e) {
            // Ignore
        }
    }
    @SuppressWarnings("unused")
    @OnMessage
    public void echoBinaryMessage(Session session, ByteBuffer msg,
            boolean last) {
        try {
            // Hold the message until clearBlock() releases the monitor
            synchronized (monitor) {
                while (block) {
                    monitor.wait();
                }
            }
        } catch (InterruptedException e) {
            // Ignore
        }
    }
}
/*
 * Client-side handler that sleeps far longer than the test timeout for each
 * message part, preventing the client from draining the connection.
 */
public static class BlockingBinaryHandler
        implements MessageHandler.Partial<ByteBuffer> {
    @Override
    public void onMessage(ByteBuffer messagePart, boolean last) {
        try {
            Thread.sleep(TIMEOUT_MS * 10);
        } catch (InterruptedException e) {
            // Ignore
        }
    }
}
/*
 * Server endpoint that transmits binary messages continuously from onOpen()
 * until the async send times out, recording the observed timeout and
 * exception in statics for the test to assert on.
 */
public static class ConstantTxEndpoint extends Endpoint {
    // Have to be static to be able to retrieve results from test case
    private static volatile long timeout = -1;
    private static volatile Exception exception = null;
    private static volatile boolean running = true;
    @Override
    public void onOpen(Session session, EndpointConfig config) {
        // Reset everything
        timeout = -1;
        exception = null;
        running = true;
        if (!TestWsWebSocketContainer.timeoutOnContainer) {
            session.getAsyncRemote().setSendTimeout(TIMEOUT_MS);
        }
        // The close message is written with a blocking write. This is going
        // to fail so reduce the timeout from the default so the test
        // completes faster
        session.getUserProperties().put(
                WsRemoteEndpointImplBase.BLOCKING_SEND_TIMEOUT_PROPERTY, Long.valueOf(5000));
        // Should send quickly until the network buffers fill up and then
        // block until the timeout kicks in
        long lastSend = 0;
        try {
            while (true) {
                lastSend = System.currentTimeMillis();
                Future<Void> f = session.getAsyncRemote().sendBinary(
                        ByteBuffer.wrap(MESSAGE_BINARY_4K));
                f.get();
            }
        } catch (ExecutionException | InterruptedException e) {
            exception = e;
        }
        // How long the final (failed) send took before surfacing the error
        timeout = System.currentTimeMillis() - lastSend;
        running = false;
    }
    public static long getTimeout() {
        return timeout;
    }
    public static Exception getException() {
        return exception;
    }
    public static boolean getRunning() {
        return running;
    }
}
/*
 * Deploys ConstantTxEndpoint and, when requested via the static flag,
 * configures the container-wide async send timeout.
 */
public static class ConstantTxConfig extends WsContextListener {
    private static final String PATH = "/test";
    @Override
    public void contextInitialized(ServletContextEvent sce) {
        super.contextInitialized(sce);
        ServerContainer sc =
                (ServerContainer) sce.getServletContext().getAttribute(
                        Constants.SERVER_CONTAINER_SERVLET_CONTEXT_ATTRIBUTE);
        try {
            sc.addEndpoint(ServerEndpointConfig.Builder.create(
                    ConstantTxEndpoint.class, PATH).build());
            if (TestWsWebSocketContainer.timeoutOnContainer) {
                sc.setAsyncSendTimeout(TIMEOUT_MS);
            }
        } catch (DeploymentException e) {
            throw new IllegalStateException(e);
        }
    }
}
/*
 * Verifies Session.getOpenSessions() groups sessions by endpoint instance:
 * sessions opened against endpointA are visible to each other but not to
 * endpointB's sessions, and a closed session drops out of the set.
 */
@Test
public void testGetOpenSessions() throws Exception {
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");
    tomcat.start();
    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    EndpointA endpointA = new EndpointA();
    Session s1a = connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    Session s2a = connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    Session s3a = connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    EndpointB endpointB = new EndpointB();
    Session s1b = connectToEchoServer(wsContainer, endpointB,
            TesterEchoServer.Config.PATH_BASIC);
    Session s2b = connectToEchoServer(wsContainer, endpointB,
            TesterEchoServer.Config.PATH_BASIC);
    // All three endpointA sessions should see each other
    Set<Session> setA = s3a.getOpenSessions();
    Assert.assertEquals(3, setA.size());
    Assert.assertTrue(setA.remove(s1a));
    Assert.assertTrue(setA.remove(s2a));
    Assert.assertTrue(setA.remove(s3a));
    // Closing one session removes it from subsequent snapshots
    s1a.close();
    setA = s3a.getOpenSessions();
    Assert.assertEquals(2, setA.size());
    Assert.assertFalse(setA.remove(s1a));
    Assert.assertTrue(setA.remove(s2a));
    Assert.assertTrue(setA.remove(s3a));
    // endpointB sessions are tracked separately from endpointA's
    Set<Session> setB = s1b.getOpenSessions();
    Assert.assertEquals(2, setB.size());
    Assert.assertTrue(setB.remove(s1b));
    Assert.assertTrue(setB.remove(s2b));
    // Close sessions explicitly as Gump reports a session remains open at
    // the end of this test
    s2a.close();
    s3a.close();
    s1b.close();
    s2b.close();
}
/*
 * Verifies that idle sessions are expired by the container-level default
 * max idle timeout: after the 5 second default elapses, all sessions must
 * report closed within the polling window.
 */
@Test
public void testSessionExpiryContainer() throws Exception {
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");
    tomcat.start();
    // Need access to implementation methods for configuring unit tests
    WsWebSocketContainer wsContainer = (WsWebSocketContainer)
            ContainerProvider.getWebSocketContainer();
    // 5 second timeout
    wsContainer.setDefaultMaxSessionIdleTimeout(5000);
    wsContainer.setProcessPeriod(1);
    EndpointA endpointA = new EndpointA();
    connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    Session s3a = connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    // Check all three sessions are open
    Set<Session> setA = s3a.getOpenSessions();
    Assert.assertEquals(3, setA.size());
    // Poll for up to 8 seconds for every session to be expired
    int count = 0;
    boolean isOpen = true;
    while (isOpen && count < 8) {
        count ++;
        Thread.sleep(1000);
        isOpen = false;
        for (Session session : setA) {
            if (session.isOpen()) {
                isOpen = true;
                break;
            }
        }
    }
    if (isOpen) {
        // Report which sessions were still open before failing
        for (Session session : setA) {
            if (session.isOpen()) {
                System.err.println("Session with ID [" + session.getId() +
                        "] is open");
            }
        }
        Assert.fail("There were open sessions");
    }
}
/*
 * Verifies per-session idle timeouts override the container default: three
 * sessions with 3s/6s/9s timeouts must expire one at a time, in order.
 */
@Test
public void testSessionExpirySession() throws Exception {
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");
    tomcat.start();
    // Need access to implementation methods for configuring unit tests
    WsWebSocketContainer wsContainer = (WsWebSocketContainer)
            ContainerProvider.getWebSocketContainer();
    // 5 second timeout
    wsContainer.setDefaultMaxSessionIdleTimeout(5000);
    wsContainer.setProcessPeriod(1);
    EndpointA endpointA = new EndpointA();
    Session s1a = connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    s1a.setMaxIdleTimeout(3000);
    Session s2a = connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    s2a.setMaxIdleTimeout(6000);
    Session s3a = connectToEchoServer(wsContainer, endpointA,
            TesterEchoServer.Config.PATH_BASIC);
    s3a.setMaxIdleTimeout(9000);
    // Check all three sessions are open
    Set<Session> setA = s3a.getOpenSessions();
    // Expect the open count to step down 3 -> 2 -> 1 -> 0 as each
    // session's individual timeout fires (polling up to 5s per step)
    int expected = 3;
    while (expected > 0) {
        Assert.assertEquals(expected, getOpenCount(setA));
        int count = 0;
        while (getOpenCount(setA) == expected && count < 50) {
            count ++;
            Thread.sleep(100);
        }
        expected--;
    }
    Assert.assertEquals(0, getOpenCount(setA));
}
/*
 * Counts how many of the given sessions currently report themselves open.
 */
private int getOpenCount(Set<Session> sessions) {
    int openSessions = 0;
    for (Session candidate : sessions) {
        if (!candidate.isOpen()) {
            continue;
        }
        openSessions++;
    }
    return openSessions;
}
/*
 * Opens a client WebSocket connection to the given echo endpoint path on
 * the embedded Tomcat instance using a default client configuration.
 */
private Session connectToEchoServer(WebSocketContainer wsContainer,
        Endpoint endpoint, String path) throws Exception {
    ClientEndpointConfig defaultConfig =
            ClientEndpointConfig.Builder.create().build();
    URI target = new URI("ws://" + getHostName() + ":" + getPort() + path);
    return wsContainer.connectToServer(endpoint, defaultConfig, target);
}
// No-op client endpoint. Two distinct classes (A and B) exist so tests can
// verify that getOpenSessions() partitions sessions by endpoint.
public static final class EndpointA extends Endpoint {
    @Override
    public void onOpen(Session session, EndpointConfig config) {
        // NO-OP
    }
}
// Second no-op client endpoint; see EndpointA for why two classes exist.
public static final class EndpointB extends Endpoint {
    @Override
    public void onOpen(Session session, EndpointConfig config) {
        // NO-OP
    }
}
/*
 * Round-trips a text message over a wss:// (TLS) connection, trusting the
 * test CA via the client's SSL_TRUSTSTORE_PROPERTY user property.
 */
@Test
public void testConnectToServerEndpointSSL() throws Exception {
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");
    // Configure the connector for TLS before starting
    TesterSupport.initSsl(tomcat);
    tomcat.start();
    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    ClientEndpointConfig clientEndpointConfig =
            ClientEndpointConfig.Builder.create().build();
    // Point the client at the test CA trust store so the handshake succeeds
    clientEndpointConfig.getUserProperties().put(
            WsWebSocketContainer.SSL_TRUSTSTORE_PROPERTY,
            TesterSupport.CA_JKS);
    Session wsSession = wsContainer.connectToServer(
            TesterProgrammaticEndpoint.class,
            clientEndpointConfig,
            new URI("wss://" + getHostName() + ":" + getPort() +
                    TesterEchoServer.Config.PATH_ASYNC));
    CountDownLatch latch = new CountDownLatch(1);
    BasicText handler = new BasicText(latch);
    wsSession.addMessageHandler(handler);
    wsSession.getBasicRemote().sendText(MESSAGE_STRING_1);
    boolean latchResult = handler.getLatch().await(10, TimeUnit.SECONDS);
    Assert.assertTrue(latchResult);
    // Exactly one message should come back, unchanged
    Queue<String> messages = handler.getMessages();
    Assert.assertEquals(1, messages.size());
    Assert.assertEquals(MESSAGE_STRING_1, messages.peek());
}
// Below the low limit: session must stay open
@Test
public void testMaxMessageSize01() throws Exception {
    doMaxMessageSize(TesterEchoServer.Config.PATH_BASIC_LIMIT_LOW,
            TesterEchoServer.BasicLimitLow.MAX_SIZE - 1, true);
}
// Exactly at the low limit: session must stay open
@Test
public void testMaxMessageSize02() throws Exception {
    doMaxMessageSize(TesterEchoServer.Config.PATH_BASIC_LIMIT_LOW,
            TesterEchoServer.BasicLimitLow.MAX_SIZE, true);
}
// One byte over the low limit: session must be closed
@Test
public void testMaxMessageSize03() throws Exception {
    doMaxMessageSize(TesterEchoServer.Config.PATH_BASIC_LIMIT_LOW,
            TesterEchoServer.BasicLimitLow.MAX_SIZE + 1, false);
}
// Below the high limit: session must stay open
@Test
public void testMaxMessageSize04() throws Exception {
    doMaxMessageSize(TesterEchoServer.Config.PATH_BASIC_LIMIT_HIGH,
            TesterEchoServer.BasicLimitHigh.MAX_SIZE - 1, true);
}
// Exactly at the high limit: session must stay open
@Test
public void testMaxMessageSize05() throws Exception {
    doMaxMessageSize(TesterEchoServer.Config.PATH_BASIC_LIMIT_HIGH,
            TesterEchoServer.BasicLimitHigh.MAX_SIZE, true);
}
// One byte over the high limit: session must be closed
@Test
public void testMaxMessageSize06() throws Exception {
    doMaxMessageSize(TesterEchoServer.Config.PATH_BASIC_LIMIT_HIGH,
            TesterEchoServer.BasicLimitHigh.MAX_SIZE + 1, false);
}
/*
 * Sends a text message of {@code size} characters to an endpoint with a
 * configured max message size and asserts whether the session remains open
 * ({@code expectOpen}). Also checks the WebSocket background threads start
 * and stop as expected.
 */
private void doMaxMessageSize(String path, long size, boolean expectOpen)
        throws Exception {
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");
    tomcat.start();
    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    Session s = connectToEchoServer(wsContainer, new EndpointA(), path);
    // One for the client, one for the server
    validateBackgroundProcessCount(2);
    // Build a payload of exactly {@code size} characters
    StringBuilder msg = new StringBuilder();
    for (long i = 0; i < size; i++) {
        msg.append('x');
    }
    s.getBasicRemote().sendText(msg.toString());
    // Wait for up to 5 seconds for the client session to open
    boolean open = s.isOpen();
    int count = 0;
    while (open != expectOpen && count < 50) {
        Thread.sleep(100);
        count++;
        open = s.isOpen();
    }
    Assert.assertEquals(Boolean.valueOf(expectOpen),
            Boolean.valueOf(s.isOpen()));
    // Close the session if it is expected to be open
    if (expectOpen) {
        s.close();
    }
    // Ensure both server and client have shutdown
    validateBackgroundProcessCount(0);
}
/*
 * Polls (up to START_STOP_WAIT ms) until the BackgroundProcessManager
 * reports the expected number of background processes, then asserts it.
 * Polling is needed because processes start/stop asynchronously.
 */
private void validateBackgroundProcessCount(int expected) throws Exception {
    int count = 0;
    while (count < (START_STOP_WAIT / 100)) {
        if (BackgroundProcessManager.getInstance().getProcessCount() == expected) {
            break;
        }
        Thread.sleep(100);
        count++;
    }
    Assert.assertEquals(expected, BackgroundProcessManager.getInstance().getProcessCount());
}
// Single non-empty message with permessage-deflate
@Test
public void testPerMessageDeflateClient01() throws Exception {
    doTestPerMessageDeflateClient(MESSAGE_STRING_1, 1);
}
// Single empty message with permessage-deflate
@Test
public void testPerMessageDeflateClient02() throws Exception {
    doTestPerMessageDeflateClient(MESSAGE_EMPTY, 1);
}
// Two non-empty messages with permessage-deflate
@Test
public void testPerMessageDeflateClient03() throws Exception {
    doTestPerMessageDeflateClient(MESSAGE_STRING_1, 2);
}
// Two empty messages with permessage-deflate
@Test
public void testPerMessageDeflateClient04() throws Exception {
    doTestPerMessageDeflateClient(MESSAGE_EMPTY, 2);
}
/*
 * Negotiates the permessage-deflate extension on the client side, sends
 * {@code msg} {@code count} times to the echo endpoint and waits for the
 * same number of responses.
 */
private void doTestPerMessageDeflateClient(String msg, int count) throws Exception {
    Tomcat tomcat = getTomcatInstance();
    // No file system docBase required
    Context ctx = tomcat.addContext("", null);
    ctx.addApplicationListener(TesterEchoServer.Config.class.getName());
    Tomcat.addServlet(ctx, "default", new DefaultServlet());
    ctx.addServletMappingDecoded("/", "default");
    tomcat.start();
    // Request the permessage-deflate extension during the handshake
    Extension perMessageDeflate = new WsExtension(PerMessageDeflate.NAME);
    List<Extension> extensions = new ArrayList<>(1);
    extensions.add(perMessageDeflate);
    ClientEndpointConfig clientConfig =
            ClientEndpointConfig.Builder.create().extensions(extensions).build();
    WebSocketContainer wsContainer =
            ContainerProvider.getWebSocketContainer();
    Session wsSession = wsContainer.connectToServer(
            TesterProgrammaticEndpoint.class,
            clientConfig,
            new URI("ws://" + getHostName() + ":" + getPort() +
                    TesterEchoServer.Config.PATH_ASYNC));
    CountDownLatch latch = new CountDownLatch(count);
    BasicText handler = new BasicText(latch, msg);
    wsSession.addMessageHandler(handler);
    for (int i = 0; i < count; i++) {
        wsSession.getBasicRemote().sendText(msg);
    }
    boolean latchResult = handler.getLatch().await(10, TimeUnit.SECONDS);
    Assert.assertTrue(latchResult);
    // Explicitly destroy the container to stop its background threads
    ((WsWebSocketContainer) wsContainer).destroy();
}
/*
 * Host name used for client connections. Overridable so a sub-class can
 * more easily test proxy configurations.
 */
protected String getHostName() {
    return "localhost";
}
}
|
googleapis/google-cloud-java | 35,836 | java-geminidataanalytics/google-cloud-geminidataanalytics/src/test/java/com/google/cloud/geminidataanalytics/v1beta/DataAgentServiceClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.geminidataanalytics.v1beta;
import static com.google.cloud.geminidataanalytics.v1beta.DataAgentServiceClient.ListAccessibleDataAgentsPagedResponse;
import static com.google.cloud.geminidataanalytics.v1beta.DataAgentServiceClient.ListDataAgentsPagedResponse;
import static com.google.cloud.geminidataanalytics.v1beta.DataAgentServiceClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.resourcenames.ResourceName;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class DataAgentServiceClientTest {
// In-process mock gRPC services shared across all tests in this class
private static MockDataAgentService mockDataAgentService;
private static MockLocations mockLocations;
private static MockServiceHelper mockServiceHelper;
// Per-test channel and client wired to the mock server
private LocalChannelProvider channelProvider;
private DataAgentServiceClient client;
// Starts a single in-process gRPC server hosting both mock services for
// the whole test class.
@BeforeClass
public static void startStaticServer() {
    mockDataAgentService = new MockDataAgentService();
    mockLocations = new MockLocations();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockDataAgentService, mockLocations));
    mockServiceHelper.start();
}
@AfterClass
public static void stopServer() {
    mockServiceHelper.stop();
}
// Resets mock state and creates a fresh client (no credentials, local
// channel) before each test.
@Before
public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    DataAgentServiceSettings settings =
        DataAgentServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = DataAgentServiceClient.create(settings);
}
@After
public void tearDown() throws Exception {
    client.close();
}
// listDataAgents with a typed LocationName parent: verifies the paged
// response content and that the request carried the formatted parent.
@Test
public void listDataAgentsTest() throws Exception {
    DataAgent responsesElement = DataAgent.newBuilder().build();
    ListDataAgentsResponse expectedResponse =
        ListDataAgentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataAgents(Arrays.asList(responsesElement))
            .build();
    mockDataAgentService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ListDataAgentsPagedResponse pagedListResponse = client.listDataAgents(parent);
    List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDataAgentsRequest actualRequest = ((ListDataAgentsRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    // Standard GAPIC client header must be sent on the channel
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// listDataAgents error path: a gRPC INVALID_ARGUMENT status must surface
// as InvalidArgumentException.
@Test
public void listDataAgentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataAgentService.addException(exception);
    try {
        LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
        client.listDataAgents(parent);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
// listDataAgents with a plain String parent (overload variant).
@Test
public void listDataAgentsTest2() throws Exception {
    DataAgent responsesElement = DataAgent.newBuilder().build();
    ListDataAgentsResponse expectedResponse =
        ListDataAgentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataAgents(Arrays.asList(responsesElement))
            .build();
    mockDataAgentService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListDataAgentsPagedResponse pagedListResponse = client.listDataAgents(parent);
    List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDataAgentsRequest actualRequest = ((ListDataAgentsRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Error path for the String-parent overload.
@Test
public void listDataAgentsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataAgentService.addException(exception);
    try {
        String parent = "parent-995424086";
        client.listDataAgents(parent);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
// listAccessibleDataAgents with a typed LocationName parent: verifies the
// paged response content and the request sent to the mock service.
@Test
public void listAccessibleDataAgentsTest() throws Exception {
    DataAgent responsesElement = DataAgent.newBuilder().build();
    ListAccessibleDataAgentsResponse expectedResponse =
        ListAccessibleDataAgentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataAgents(Arrays.asList(responsesElement))
            .build();
    mockDataAgentService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ListAccessibleDataAgentsPagedResponse pagedListResponse =
        client.listAccessibleDataAgents(parent);
    List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListAccessibleDataAgentsRequest actualRequest =
        ((ListAccessibleDataAgentsRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    // Standard GAPIC client header must be sent on the channel
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Error path: gRPC INVALID_ARGUMENT surfaces as InvalidArgumentException.
@Test
public void listAccessibleDataAgentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataAgentService.addException(exception);
    try {
        LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
        client.listAccessibleDataAgents(parent);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
// listAccessibleDataAgents with a plain String parent (overload variant).
@Test
public void listAccessibleDataAgentsTest2() throws Exception {
    DataAgent responsesElement = DataAgent.newBuilder().build();
    ListAccessibleDataAgentsResponse expectedResponse =
        ListAccessibleDataAgentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataAgents(Arrays.asList(responsesElement))
            .build();
    mockDataAgentService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListAccessibleDataAgentsPagedResponse pagedListResponse =
        client.listAccessibleDataAgents(parent);
    List<DataAgent> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataAgentsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListAccessibleDataAgentsRequest actualRequest =
        ((ListAccessibleDataAgentsRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Error path for the String-parent overload.
@Test
public void listAccessibleDataAgentsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataAgentService.addException(exception);
    try {
        String parent = "parent-995424086";
        client.listAccessibleDataAgents(parent);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
// getDataAgent with a typed DataAgentName: verifies the returned resource
// and that the request carried the formatted name.
@Test
public void getDataAgentTest() throws Exception {
    DataAgent expectedResponse =
        DataAgent.newBuilder()
            .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .putAllLabels(new HashMap<String, String>())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDeleteTime(Timestamp.newBuilder().build())
            .setPurgeTime(Timestamp.newBuilder().build())
            .build();
    mockDataAgentService.addResponse(expectedResponse);
    DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
    DataAgent actualResponse = client.getDataAgent(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDataAgentRequest actualRequest = ((GetDataAgentRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    // Standard GAPIC client header must be sent on the channel
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Error path: gRPC INVALID_ARGUMENT surfaces as InvalidArgumentException.
@Test
public void getDataAgentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataAgentService.addException(exception);
    try {
        DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
        client.getDataAgent(name);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
// getDataAgent with a plain String name (overload variant).
@Test
public void getDataAgentTest2() throws Exception {
    DataAgent expectedResponse =
        DataAgent.newBuilder()
            .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .putAllLabels(new HashMap<String, String>())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDeleteTime(Timestamp.newBuilder().build())
            .setPurgeTime(Timestamp.newBuilder().build())
            .build();
    mockDataAgentService.addResponse(expectedResponse);
    String name = "name3373707";
    DataAgent actualResponse = client.getDataAgent(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDataAgentRequest actualRequest = ((GetDataAgentRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Error path for the String-name overload.
@Test
public void getDataAgentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataAgentService.addException(exception);
    try {
        String name = "name3373707";
        client.getDataAgent(name);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
// createDataAgent (long-running operation): the mock returns an already
// completed Operation wrapping the expected DataAgent; verifies the
// resolved future and the CreateDataAgentRequest fields.
@Test
public void createDataAgentTest() throws Exception {
    DataAgent expectedResponse =
        DataAgent.newBuilder()
            .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .putAllLabels(new HashMap<String, String>())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDeleteTime(Timestamp.newBuilder().build())
            .setPurgeTime(Timestamp.newBuilder().build())
            .build();
    // Pre-completed LRO so the client future resolves immediately
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDataAgentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDataAgentService.addResponse(resultOperation);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    DataAgent dataAgent = DataAgent.newBuilder().build();
    String dataAgentId = "dataAgentId1752773622";
    DataAgent actualResponse = client.createDataAgentAsync(parent, dataAgent, dataAgentId).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDataAgentRequest actualRequest = ((CreateDataAgentRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(dataAgent, actualRequest.getDataAgent());
    Assert.assertEquals(dataAgentId, actualRequest.getDataAgentId());
    // Standard GAPIC client header must be sent on the channel
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Async failure path: the InvalidArgumentException arrives wrapped in an
// ExecutionException from Future.get(), with the original status code preserved.
@Test
public void createDataAgentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    DataAgent dataAgent = DataAgent.newBuilder().build();
    String dataAgentId = "dataAgentId1752773622";
    client.createDataAgentAsync(parent, dataAgent, dataAgentId).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
// String-parent overload of createDataAgentAsync; otherwise mirrors createDataAgentTest.
@Test
public void createDataAgentTest2() throws Exception {
  DataAgent expectedResponse =
      DataAgent.newBuilder()
          .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .putAllLabels(new HashMap<String, String>())
          .setCreateTime(Timestamp.newBuilder().build())
          .setUpdateTime(Timestamp.newBuilder().build())
          .setDeleteTime(Timestamp.newBuilder().build())
          .setPurgeTime(Timestamp.newBuilder().build())
          .build();
  // NOTE(review): the operation name reuses "createDataAgentTest" (not "...Test2");
  // harmless since the name is never asserted.
  Operation resultOperation =
      Operation.newBuilder()
          .setName("createDataAgentTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockDataAgentService.addResponse(resultOperation);
  String parent = "parent-995424086";
  DataAgent dataAgent = DataAgent.newBuilder().build();
  String dataAgentId = "dataAgentId1752773622";
  DataAgent actualResponse = client.createDataAgentAsync(parent, dataAgent, dataAgentId).get();
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  CreateDataAgentRequest actualRequest = ((CreateDataAgentRequest) actualRequests.get(0));
  Assert.assertEquals(parent, actualRequest.getParent());
  Assert.assertEquals(dataAgent, actualRequest.getDataAgent());
  Assert.assertEquals(dataAgentId, actualRequest.getDataAgentId());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// String-parent overload of the async createDataAgent failure test.
@Test
public void createDataAgentExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    String parent = "parent-995424086";
    DataAgent dataAgent = DataAgent.newBuilder().build();
    String dataAgentId = "dataAgentId1752773622";
    client.createDataAgentAsync(parent, dataAgent, dataAgentId).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
// Verifies updateDataAgentAsync(DataAgent, FieldMask): the LRO resolves to the packed
// DataAgent and the request carries both the agent and the update mask unchanged.
@Test
public void updateDataAgentTest() throws Exception {
  DataAgent expectedResponse =
      DataAgent.newBuilder()
          .setName(DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]").toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .putAllLabels(new HashMap<String, String>())
          .setCreateTime(Timestamp.newBuilder().build())
          .setUpdateTime(Timestamp.newBuilder().build())
          .setDeleteTime(Timestamp.newBuilder().build())
          .setPurgeTime(Timestamp.newBuilder().build())
          .build();
  // Pre-completed Operation so the LRO future resolves immediately in the test.
  Operation resultOperation =
      Operation.newBuilder()
          .setName("updateDataAgentTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockDataAgentService.addResponse(resultOperation);
  DataAgent dataAgent = DataAgent.newBuilder().build();
  FieldMask updateMask = FieldMask.newBuilder().build();
  DataAgent actualResponse = client.updateDataAgentAsync(dataAgent, updateMask).get();
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  UpdateDataAgentRequest actualRequest = ((UpdateDataAgentRequest) actualRequests.get(0));
  Assert.assertEquals(dataAgent, actualRequest.getDataAgent());
  Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Async failure path for updateDataAgentAsync: cause and status code are preserved
// through the ExecutionException wrapper.
@Test
public void updateDataAgentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    DataAgent dataAgent = DataAgent.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    client.updateDataAgentAsync(dataAgent, updateMask).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
// Verifies deleteDataAgentAsync(DataAgentName): the LRO completes with Empty and the
// request name is the stringified resource name.
@Test
public void deleteDataAgentTest() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  Operation resultOperation =
      Operation.newBuilder()
          .setName("deleteDataAgentTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockDataAgentService.addResponse(resultOperation);
  DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
  client.deleteDataAgentAsync(name).get();
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  DeleteDataAgentRequest actualRequest = ((DeleteDataAgentRequest) actualRequests.get(0));
  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Async failure path for deleteDataAgentAsync(DataAgentName).
@Test
public void deleteDataAgentExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    DataAgentName name = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
    client.deleteDataAgentAsync(name).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
// String-name overload of deleteDataAgentAsync; mirrors deleteDataAgentTest.
// NOTE(review): the operation name reuses "deleteDataAgentTest" (not "...Test2");
// harmless since the name is never asserted.
@Test
public void deleteDataAgentTest2() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  Operation resultOperation =
      Operation.newBuilder()
          .setName("deleteDataAgentTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockDataAgentService.addResponse(resultOperation);
  String name = "name3373707";
  client.deleteDataAgentAsync(name).get();
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  DeleteDataAgentRequest actualRequest = ((DeleteDataAgentRequest) actualRequests.get(0));
  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Async failure path for the String-name overload of deleteDataAgentAsync.
@Test
public void deleteDataAgentExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    String name = "name3373707";
    client.deleteDataAgentAsync(name).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
// Verifies getIamPolicy(ResourceName): returns the canned Policy and sends the
// stringified resource in the request.
@Test
public void getIamPolicyTest() throws Exception {
  Policy expectedResponse =
      Policy.newBuilder()
          .setVersion(351608024)
          .addAllBindings(new ArrayList<Binding>())
          .addAllAuditConfigs(new ArrayList<AuditConfig>())
          .setEtag(ByteString.EMPTY)
          .build();
  mockDataAgentService.addResponse(expectedResponse);
  ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
  Policy actualResponse = client.getIamPolicy(resource);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
  Assert.assertEquals(resource.toString(), actualRequest.getResource());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Failure path for getIamPolicy(ResourceName): INVALID_ARGUMENT maps to
// InvalidArgumentException.
@Test
public void getIamPolicyExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
    client.getIamPolicy(resource);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// String-resource overload of getIamPolicy; mirrors getIamPolicyTest.
@Test
public void getIamPolicyTest2() throws Exception {
  Policy expectedResponse =
      Policy.newBuilder()
          .setVersion(351608024)
          .addAllBindings(new ArrayList<Binding>())
          .addAllAuditConfigs(new ArrayList<AuditConfig>())
          .setEtag(ByteString.EMPTY)
          .build();
  mockDataAgentService.addResponse(expectedResponse);
  String resource = "resource-341064690";
  Policy actualResponse = client.getIamPolicy(resource);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
  Assert.assertEquals(resource, actualRequest.getResource());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Failure path for the String-resource overload of getIamPolicy.
@Test
public void getIamPolicyExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    String resource = "resource-341064690";
    client.getIamPolicy(resource);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// Verifies setIamPolicy(ResourceName): returns the canned Policy and sends the
// stringified resource in the request.
@Test
public void setIamPolicyTest() throws Exception {
  Policy expectedResponse =
      Policy.newBuilder()
          .setVersion(351608024)
          .addAllBindings(new ArrayList<Binding>())
          .addAllAuditConfigs(new ArrayList<AuditConfig>())
          .setEtag(ByteString.EMPTY)
          .build();
  mockDataAgentService.addResponse(expectedResponse);
  ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
  Policy actualResponse = client.setIamPolicy(resource);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
  Assert.assertEquals(resource.toString(), actualRequest.getResource());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Failure path for setIamPolicy(ResourceName).
@Test
public void setIamPolicyExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    ResourceName resource = DataAgentName.of("[PROJECT]", "[LOCATION]", "[DATA_AGENT]");
    client.setIamPolicy(resource);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// String-resource overload of setIamPolicy; mirrors setIamPolicyTest.
@Test
public void setIamPolicyTest2() throws Exception {
  Policy expectedResponse =
      Policy.newBuilder()
          .setVersion(351608024)
          .addAllBindings(new ArrayList<Binding>())
          .addAllAuditConfigs(new ArrayList<AuditConfig>())
          .setEtag(ByteString.EMPTY)
          .build();
  mockDataAgentService.addResponse(expectedResponse);
  String resource = "resource-341064690";
  Policy actualResponse = client.setIamPolicy(resource);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockDataAgentService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
  Assert.assertEquals(resource, actualRequest.getResource());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Failure path for the String-resource overload of setIamPolicy.
@Test
public void setIamPolicyExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockDataAgentService.addException(exception);
  try {
    String resource = "resource-341064690";
    client.setIamPolicy(resource);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// Verifies listLocations paging: a single-element page with an empty next-page token
// yields exactly one resource via iterateAll(), and all request fields pass through.
@Test
public void listLocationsTest() throws Exception {
  Location responsesElement = Location.newBuilder().build();
  // Empty next-page token marks this as the final (only) page.
  ListLocationsResponse expectedResponse =
      ListLocationsResponse.newBuilder()
          .setNextPageToken("")
          .addAllLocations(Arrays.asList(responsesElement))
          .build();
  mockLocations.addResponse(expectedResponse);
  ListLocationsRequest request =
      ListLocationsRequest.newBuilder()
          .setName("name3373707")
          .setFilter("filter-1274492040")
          .setPageSize(883849137)
          .setPageToken("pageToken873572522")
          .build();
  ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
  List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
  List<AbstractMessage> actualRequests = mockLocations.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
  Assert.assertEquals(request.getName(), actualRequest.getName());
  Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
  Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
  Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Failure path for listLocations against the mock Locations service.
@Test
public void listLocationsExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockLocations.addException(exception);
  try {
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    client.listLocations(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// Verifies getLocation: the canned Location is returned and the request name passes
// through to the mock Locations service.
@Test
public void getLocationTest() throws Exception {
  Location expectedResponse =
      Location.newBuilder()
          .setName("name3373707")
          .setLocationId("locationId1541836720")
          .setDisplayName("displayName1714148973")
          .putAllLabels(new HashMap<String, String>())
          .setMetadata(Any.newBuilder().build())
          .build();
  mockLocations.addResponse(expectedResponse);
  GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
  Location actualResponse = client.getLocation(request);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockLocations.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
  Assert.assertEquals(request.getName(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
// Failure path for getLocation against the mock Locations service.
@Test
public void getLocationExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockLocations.addException(exception);
  try {
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    client.getLocation(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
}
|
apache/felix-dev | 35,671 | scr/src/test/java/org/apache/felix/scr/impl/metadata/ComponentMetadataTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.scr.impl.metadata;
import java.lang.reflect.Array;
import java.util.List;
import org.osgi.service.component.ComponentException;
import junit.framework.TestCase;
public class ComponentMetadataTest extends TestCase
{
// test various combinations of component metadata with respect to
// -- immediate: true, false, unset
// -- factory: set, unset
// -- service: set, unset
// -- servicefactory: true, false, unset
// Covers components declared (or defaulting to) immediate=true:
// immediate is legal without a service, with a plain service, and with a
// non-factory service, but must fail validation combined with a service factory.
public void testImmediate()
{
    // immediate is default true if no service element is defined
    final ComponentMetadata cm0 = createComponentMetadata( null, null );
    cm0.validate( );
    assertTrue( "Component without service must be immediate", cm0.isImmediate() );
    // immediate is explicit true
    final ComponentMetadata cm1 = createComponentMetadata( Boolean.TRUE, null );
    cm1.validate( );
    assertTrue( "Component must be immediate", cm1.isImmediate() );
    // immediate is explicit true, unset servicefactory
    final ComponentMetadata cm2 = createComponentMetadata( Boolean.TRUE, null );
    cm2.setService( createServiceMetadata( null ) );
    cm2.validate( );
    assertTrue( "Component must be immediate", cm2.isImmediate() );
    // immediate is explicit true, servicefactory=false
    final ComponentMetadata cm3 = createComponentMetadata( Boolean.TRUE, null );
    cm3.setService( createServiceMetadata( Boolean.FALSE ) );
    cm3.validate( );
    assertTrue( "Component must be immediate", cm3.isImmediate() );
    // validation failure of immediate with service factory
    final ComponentMetadata cm4 = createComponentMetadata( Boolean.TRUE, null );
    cm4.setService( createServiceMetadata( Boolean.TRUE ) );
    try
    {
        cm4.validate( );
        fail( "Expect validation failure for immediate service factory" );
    }
    catch ( ComponentException ce )
    {
        // expect: immediate components cannot be service factories
    }
}
// Covers delayed components: any component with a service element defaults to (or may
// explicitly declare) immediate=false, while an explicit immediate=false WITHOUT a
// service element is a validation error.
public void testDelayed()
{
    // immediate is default false if service element is defined
    final ComponentMetadata cm0 = createComponentMetadata( null, null );
    cm0.setService( createServiceMetadata( null ) );
    cm0.validate( );
    assertFalse( "Component with service must be delayed", cm0.isImmediate() );
    // immediate is default false if service element is defined (service factory)
    final ComponentMetadata cm1 = createComponentMetadata( null, null );
    cm1.setService( createServiceMetadata( Boolean.TRUE ) );
    cm1.validate( );
    assertFalse( "Component with service must be delayed", cm1.isImmediate() );
    // immediate is default false if service element is defined (non service factory)
    final ComponentMetadata cm2 = createComponentMetadata( null, null );
    cm2.setService( createServiceMetadata( Boolean.FALSE ) );
    cm2.validate( );
    assertFalse( "Component with service must be delayed", cm2.isImmediate() );
    // immediate is false if service element is defined
    final ComponentMetadata cm3 = createComponentMetadata( Boolean.FALSE, null );
    cm3.setService( createServiceMetadata( null ) );
    cm3.validate( );
    assertFalse( "Component with service must be delayed", cm3.isImmediate() );
    // immediate is false if service element is defined (service factory)
    final ComponentMetadata cm4 = createComponentMetadata( Boolean.FALSE, null );
    cm4.setService( createServiceMetadata( Boolean.TRUE ) );
    cm4.validate( );
    assertFalse( "Component with service must be delayed", cm4.isImmediate() );
    // immediate is false if service element is defined (non service factory)
    final ComponentMetadata cm5 = createComponentMetadata( Boolean.FALSE, null );
    cm5.setService( createServiceMetadata( Boolean.FALSE ) );
    cm5.validate( );
    assertFalse( "Component with service must be delayed", cm5.isImmediate() );
    // explicit delayed fails when there is no service
    final ComponentMetadata cm6 = createComponentMetadata( Boolean.FALSE, null );
    try
    {
        cm6.validate( );
        fail( "Expect validation failure for delayed component without service" );
    }
    catch ( ComponentException ce )
    {
        // expect: a delayed component needs a service to provide
    }
}
// Covers factory components across the full immediate x service x servicefactory
// matrix: factory components must be delayed (immediate=true always fails), and a
// factory component combined with a service factory always fails regardless of the
// immediate setting.
public void testFactory()
{
    // --- no service element ---
    // immediate is default false if factory is defined
    final ComponentMetadata cm0 = createComponentMetadata( null, "factory" );
    cm0.validate( );
    assertFalse( "Component with factory must be delayed", cm0.isImmediate() );
    // immediate is false if factory is defined
    final ComponentMetadata cm1 = createComponentMetadata( Boolean.FALSE, "factory" );
    cm1.validate( );
    assertFalse( "Component with factory must be delayed", cm1.isImmediate() );
    // explicit immediate=true with factory is illegal
    final ComponentMetadata cm2 = createComponentMetadata( Boolean.TRUE, "factory" );
    try
    {
        cm2.validate( );
        fail( "Expect validation failure for immediate factory component" );
    }
    catch ( ComponentException ce )
    {
        // expect
    }
    // --- service element, unset servicefactory ---
    // immediate is default false if factory is defined
    final ComponentMetadata cm10 = createComponentMetadata( null, "factory" );
    cm10.setService( createServiceMetadata( null ) );
    cm10.validate( );
    assertFalse( "Component with factory must be delayed", cm10.isImmediate() );
    // immediate is false if factory is defined
    final ComponentMetadata cm11 = createComponentMetadata( Boolean.FALSE, "factory" );
    cm11.setService( createServiceMetadata( null ) );
    cm11.validate( );
    assertFalse( "Component with factory must be delayed", cm11.isImmediate() );
    // explicit immediate=true with factory is illegal
    final ComponentMetadata cm12 = createComponentMetadata( Boolean.TRUE, "factory" );
    cm12.setService( createServiceMetadata( null ) );
    try
    {
        cm12.validate( );
        fail( "Expect validation failure for immediate factory component" );
    }
    catch ( ComponentException ce )
    {
        // expect
    }
    // --- service element, servicefactory=false ---
    // immediate is default false if factory is defined
    final ComponentMetadata cm20 = createComponentMetadata( null, "factory" );
    cm20.setService( createServiceMetadata( Boolean.FALSE ) );
    cm20.validate( );
    assertFalse( "Component with factory must be delayed", cm20.isImmediate() );
    // immediate is false if factory is defined
    final ComponentMetadata cm21 = createComponentMetadata( Boolean.FALSE, "factory" );
    cm21.setService( createServiceMetadata( Boolean.FALSE ) );
    cm21.validate( );
    assertFalse( "Component with factory must be delayed", cm21.isImmediate() );
    // explicit immediate=true with factory is illegal
    final ComponentMetadata cm22 = createComponentMetadata( Boolean.TRUE, "factory" );
    cm22.setService( createServiceMetadata( Boolean.FALSE ) );
    try
    {
        cm22.validate( );
        fail( "Expect validation failure for immediate factory component" );
    }
    catch ( ComponentException ce )
    {
        // expect
    }
    // --- service element, servicefactory=true: always illegal with factory ---
    final ComponentMetadata cm30 = createComponentMetadata( null, "factory" );
    cm30.setService( createServiceMetadata( Boolean.TRUE ) );
    try
    {
        cm30.validate( );
        fail( "Expect validation failure for factory component with service factory" );
    }
    catch ( ComponentException ce )
    {
        // expect
    }
    final ComponentMetadata cm31 = createComponentMetadata( Boolean.FALSE, "factory" );
    cm31.setService( createServiceMetadata( Boolean.TRUE ) );
    try
    {
        cm31.validate( );
        fail( "Expect validation failure for factory component with service factory" );
    }
    catch ( ComponentException ce )
    {
        // expect
    }
    final ComponentMetadata cm32 = createComponentMetadata( Boolean.TRUE, "factory" );
    cm32.setService( createServiceMetadata( Boolean.TRUE ) );
    try
    {
        cm32.validate( );
        fail( "Expect validation failure for immediate factory component with service factory" );
    }
    catch ( ComponentException ce )
    {
        // expect
    }
}
// DS 1.0 descriptors require an explicit component name; validation must
// reject a component whose name has been cleared.
public void test_component_no_name_ds10()
{
    final ComponentMetadata metadata = createComponentMetadata( Boolean.TRUE, null );
    metadata.setName( null );
    try
    {
        metadata.validate( );
        fail( "Expected validation failure for DS 1.0 component without name" );
    }
    catch ( ComponentException expected )
    {
        // validation rejected the nameless DS 1.0 component as required
    }
}
// DS 1.1 descriptors may omit the component name; validation then defaults
// it to the implementation class name.
public void test_component_no_name_ds11()
{
    final ComponentMetadata metadata = createComponentMetadata11( Boolean.TRUE, null );
    metadata.setName( null );
    metadata.validate( );
    assertEquals( "Expected name to equal implementation class name",
        metadata.getImplementationClassName(), metadata.getName() );
}
// DS 1.0: the activate method is always the implicit default "activate";
// declaring one explicitly must fail validation.
public void test_component_activate_ds10()
{
    final ComponentMetadata defaulted = createComponentMetadata( Boolean.TRUE, null );
    defaulted.validate( );
    assertEquals( "Activate method name", "activate", defaulted.getActivate() );
    assertFalse( "Activate method expected to not be declared", defaulted.isActivateDeclared() );

    final ComponentMetadata declared = createComponentMetadata( Boolean.TRUE, null );
    declared.setActivate( "someMethod" );
    failDS10Validation( declared, "activate" );
}
// DS 1.1: the activate method defaults to "activate" when unset, and an
// explicitly declared method name is accepted and flagged as declared.
public void test_component_activate_ds11()
{
    final ComponentMetadata defaulted = createComponentMetadata11( Boolean.TRUE, null );
    defaulted.validate( );
    assertEquals( "Activate method name", "activate", defaulted.getActivate() );
    assertFalse( "Activate method expected to not be declared", defaulted.isActivateDeclared() );

    final ComponentMetadata declared = createComponentMetadata11( Boolean.TRUE, null );
    declared.setActivate( "someMethod" );
    declared.validate( );
    assertEquals( "Activate method name", "someMethod", declared.getActivate() );
    assertTrue( "Activate method expected to be declared", declared.isActivateDeclared() );
}
// DS 1.0: the deactivate method is always the implicit default "deactivate";
// declaring one explicitly must fail validation.
public void test_component_deactivate_ds10()
{
    final ComponentMetadata defaulted = createComponentMetadata( Boolean.TRUE, null );
    defaulted.validate( );
    assertEquals( "Deactivate method name", "deactivate", defaulted.getDeactivate() );
    assertFalse( "Deactivate method expected to not be declared", defaulted.isDeactivateDeclared() );

    final ComponentMetadata declared = createComponentMetadata( Boolean.TRUE, null );
    declared.setDeactivate( "someMethod" );
    failDS10Validation( declared, "deactivate" );
}
// DS 1.1: the deactivate method defaults to "deactivate" when unset, and an
// explicitly declared method name is accepted and flagged as declared.
public void test_component_deactivate_ds11()
{
    final ComponentMetadata defaulted = createComponentMetadata11( Boolean.TRUE, null );
    defaulted.validate( );
    assertEquals( "Deactivate method name", "deactivate", defaulted.getDeactivate() );
    assertFalse( "Deactivate method expected to not be declared", defaulted.isDeactivateDeclared() );

    final ComponentMetadata declared = createComponentMetadata11( Boolean.TRUE, null );
    declared.setDeactivate( "someMethod" );
    declared.validate( );
    assertEquals( "Deactivate method name", "someMethod", declared.getDeactivate() );
    assertTrue( "Deactivate method expected to be declared", declared.isDeactivateDeclared() );
}
// DS 1.0: no modified method by default, and declaring one must fail validation.
public void test_component_modified_ds10()
{
    final ComponentMetadata defaulted = createComponentMetadata( Boolean.TRUE, null );
    defaulted.validate( );
    assertNull( "Modified method name", defaulted.getModified() );

    final ComponentMetadata declared = createComponentMetadata( Boolean.TRUE, null );
    declared.setModified( "someName" );
    failDS10Validation( declared, "modified" );
}
// DS 1.1: no modified method by default, but an explicitly declared one is accepted.
public void test_component_modified_ds11()
{
    final ComponentMetadata defaulted = createComponentMetadata11( Boolean.TRUE, null );
    defaulted.validate( );
    assertNull( "Modified method name", defaulted.getModified() );

    final ComponentMetadata declared = createComponentMetadata11( Boolean.TRUE, null );
    declared.setModified( "someMethod" );
    declared.validate( );
    assertEquals( "Modified method name", "someMethod", declared.getModified() );
}
// DS 1.0: the configuration policy silently defaults to "optional", and ANY
// explicitly set policy value - legal DS 1.1 constants and bogus strings alike -
// must fail validation.
public void test_component_configuration_policy_ds10()
{
    final ComponentMetadata defaulted = createComponentMetadata( Boolean.TRUE, null );
    defaulted.validate( );
    assertEquals( "Configuration policy", ComponentMetadata.CONFIGURATION_POLICY_OPTIONAL,
        defaulted.getConfigurationPolicy() );

    final String[] explicitPolicies =
        { ComponentMetadata.CONFIGURATION_POLICY_IGNORE,
            ComponentMetadata.CONFIGURATION_POLICY_OPTIONAL,
            ComponentMetadata.CONFIGURATION_POLICY_REQUIRE, "undefined" };
    for ( String policy : explicitPolicies )
    {
        final ComponentMetadata explicit = createComponentMetadata( Boolean.TRUE, null );
        explicit.setConfigurationPolicy( policy );
        failDS10Validation( explicit, "configuration-policy" );
    }
}
// DS 1.1: the configuration policy defaults to "optional"; the three legal values
// (ignore/optional/require) validate and are reported back, while an unknown value
// fails validation.
public void test_component_configuration_policy_ds11()
{
    final ComponentMetadata cm1 = createComponentMetadata11( Boolean.TRUE, null );
    cm1.validate( );
    assertEquals( "Configuration policy", ComponentMetadata.CONFIGURATION_POLICY_OPTIONAL,
        cm1.getConfigurationPolicy() );
    final ComponentMetadata cm2 = createComponentMetadata11( Boolean.TRUE, null );
    cm2.setConfigurationPolicy( ComponentMetadata.CONFIGURATION_POLICY_IGNORE );
    cm2.validate( );
    assertEquals( "Configuration policy", ComponentMetadata.CONFIGURATION_POLICY_IGNORE,
        cm2.getConfigurationPolicy() );
    final ComponentMetadata cm3 = createComponentMetadata11( Boolean.TRUE, null );
    cm3.setConfigurationPolicy( ComponentMetadata.CONFIGURATION_POLICY_OPTIONAL );
    cm3.validate( );
    assertEquals( "Configuration policy", ComponentMetadata.CONFIGURATION_POLICY_OPTIONAL,
        cm3.getConfigurationPolicy() );
    final ComponentMetadata cm4 = createComponentMetadata11( Boolean.TRUE, null );
    cm4.setConfigurationPolicy( ComponentMetadata.CONFIGURATION_POLICY_REQUIRE );
    cm4.validate( );
    assertEquals( "Configuration policy", ComponentMetadata.CONFIGURATION_POLICY_REQUIRE,
        cm4.getConfigurationPolicy() );
    final ComponentMetadata cm5 = createComponentMetadata11( Boolean.TRUE, null );
    cm5.setConfigurationPolicy( "undefined" );
    try
    {
        cm5.validate( );
        fail( "Expected validation failure due to undefined configuration policy" );
    }
    catch ( ComponentException ce )
    {
        // expected due to undefined configuration policy
    }
}
public void test_reference_valid()
{
    // A component with two distinctly named references must validate cleanly.
    final ComponentMetadata meta = createComponentMetadata( Boolean.TRUE, null );
    meta.addDependency( createReferenceMetadata( "name1" ) );
    meta.addDependency( createReferenceMetadata( "name2" ) );
    meta.validate( );
}
public void test_reference_duplicate_name()
{
    // Two references sharing the same name must be rejected during validation.
    final ComponentMetadata meta = createComponentMetadata( Boolean.TRUE, null );
    meta.addDependency( createReferenceMetadata( "name1" ) );
    meta.addDependency( createReferenceMetadata( "name1" ) );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for duplicate reference name" );
    }
    catch ( ComponentException expected )
    {
        // expected: duplicate reference name
    }
}
public void test_reference_no_name_ds10()
{
    // A DS 1.0 reference must carry an explicit name; no defaulting exists yet.
    final ComponentMetadata meta = createComponentMetadata( Boolean.TRUE, null );
    meta.addDependency( createReferenceMetadata( null ) );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for DS 1.0 reference without name" );
    }
    catch ( ComponentException expected )
    {
        // expected: un-named reference is illegal before DS 1.1
    }
}
public void test_reference_no_name_ds11()
{
    // From DS 1.1 on an un-named reference is legal: validation defaults the
    // reference name to the reference's interface name.
    final ComponentMetadata meta = createComponentMetadata11( Boolean.TRUE, null );
    final ReferenceMetadata ref = createReferenceMetadata( null );
    meta.addDependency( ref );
    meta.validate( );
    assertEquals( "Reference name defaults to interface", ref.getInterface(), ref.getName() );
}
public void test_reference_updated_ds10()
{
    // The updated method is a DS 1.2 feature; declaring it on a DS 1.0
    // descriptor must fail validation (FELIX-3648).
    final ReferenceMetadata ref = createReferenceMetadata( "test" );
    ref.setUpdated( "my_updated_method" );
    final ComponentMetadata meta = createComponentMetadata( Boolean.TRUE, null );
    meta.addDependency( ref );
    failDS10Validation( meta, "updated" );
}
public void test_reference_updated_ds11()
{
    // The updated method is a DS 1.2 feature; declaring it on a DS 1.1
    // descriptor must fail validation (FELIX-3648).
    final ReferenceMetadata ref = createReferenceMetadata( "test" );
    ref.setUpdated( "my_updated_method" );
    final ComponentMetadata meta = createComponentMetadata11( Boolean.TRUE, null );
    meta.addDependency( ref );
    failDS10Validation( meta, "updated" );
}
public void test_reference_updated_ds11_felix()
{
    // The proprietary DS 1.1-felix namespace already supports the updated
    // method, so validation must pass and retain the method name.
    final ReferenceMetadata ref = createReferenceMetadata( "test" );
    ref.setUpdated( "my_updated_method" );
    final ComponentMetadata meta = createComponentMetadata( DSVersion.DS11Felix, Boolean.TRUE, null );
    meta.addDependency( ref );
    meta.validate( );
    assertEquals( "my_updated_method", ref.getUpdated() );
}
public void test_reference_updated_ds12()
{
    // DS 1.2 introduced the updated method, so validation must pass and
    // retain the configured method name.
    final ReferenceMetadata ref = createReferenceMetadata( "test" );
    ref.setUpdated( "my_updated_method" );
    final ComponentMetadata meta = createComponentMetadata( DSVersion.DS12, Boolean.TRUE, null );
    meta.addDependency( ref );
    meta.validate( );
    assertEquals( "my_updated_method", ref.getUpdated() );
}
public void test_duplicate_implementation_ds10()
{
    // Setting the implementation class a second time simulates a duplicate
    // <implementation> element, which must fail DS 1.0 validation.
    final ComponentMetadata meta = createComponentMetadata( Boolean.TRUE, null );
    meta.setImplementationClassName( "second.implementation.class" );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for duplicate implementation element" );
    }
    catch ( ComponentException expected )
    {
        // expected: implementation class already set by the factory helper
    }
}
public void test_duplicate_implementation_ds11()
{
    // Setting the implementation class a second time simulates a duplicate
    // <implementation> element, which must fail DS 1.1 validation as well.
    final ComponentMetadata meta = createComponentMetadata11( Boolean.TRUE, null );
    meta.setImplementationClassName( "second.implementation.class" );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for duplicate implementation element" );
    }
    catch ( ComponentException expected )
    {
        // expected: implementation class already set by the factory helper
    }
}
public void test_duplicate_service_ds10()
{
    // Setting the service element twice simulates a duplicate <service>
    // element, which must fail DS 1.0 validation.
    final ComponentMetadata meta = createComponentMetadata( Boolean.TRUE, null );
    meta.setService( createServiceMetadata( Boolean.TRUE ) );
    meta.setService( createServiceMetadata( Boolean.TRUE ) );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for duplicate service element" );
    }
    catch ( ComponentException expected )
    {
        // expected: service element may only occur once
    }
}
public void test_duplicate_service_ds11()
{
    // Setting the service element twice simulates a duplicate <service>
    // element, which must fail DS 1.1 validation as well.
    final ComponentMetadata meta = createComponentMetadata11( Boolean.TRUE, null );
    meta.setService( createServiceMetadata( Boolean.TRUE ) );
    meta.setService( createServiceMetadata( Boolean.TRUE ) );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for duplicate service element" );
    }
    catch ( ComponentException expected )
    {
        // expected: service element may only occur once
    }
}
public void test_property_no_name_ds10()
{
    // A property element without a name must be rejected by DS 1.0 validation.
    final ComponentMetadata meta = createComponentMetadata( null, null );
    meta.addProperty( createPropertyMetadata( null, null, "" ) );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for missing property name" );
    }
    catch ( ComponentException expected )
    {
        // expected: property name is mandatory
    }
}
public void test_property_no_name_ds11()
{
    // A property element without a name must be rejected by DS 1.1 validation.
    final ComponentMetadata meta = createComponentMetadata11( null, null );
    meta.addProperty( createPropertyMetadata( null, null, "" ) );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for missing property name" );
    }
    catch ( ComponentException expected )
    {
        // expected: property name is mandatory
    }
}
public void test_property_char_ds10() throws ComponentException
{
    // DS 1.0 uses the (non-spec) type name "Char"; the value is the numeric
    // character code and must convert to a Character during validation.
    final ComponentMetadata cm = createComponentMetadata( null, null );
    PropertyMetadata prop = createPropertyMetadata( "x", "Char", Integer.toString( 'x' ) );
    cm.addProperty( prop );
    cm.validate( );
    assertTrue( prop.getValue() instanceof Character );
    // Character.valueOf instead of the deprecated Character(char) constructor
    assertEquals( Character.valueOf( 'x' ), prop.getValue() );
}
public void test_property_char_ds11()
{
    // The legacy type name "Char" is not accepted from DS 1.1 on; the
    // spec-compliant type name is "Character".
    final ComponentMetadata meta = createComponentMetadata11( null, null );
    meta.addProperty( createPropertyMetadata( "x", "Char", "x" ) );
    try
    {
        meta.validate( );
        fail( "Expect validation failure for illegal property type Char" );
    }
    catch ( ComponentException expected )
    {
        // expected: "Char" is not a valid DS 1.1 property type
    }
}
public void test_property_non_character()
{
    // Every legal scalar property type must round-trip its value, while
    // malformed numeric values must be rejected.
    final ComponentMetadata cm = createComponentMetadata( null, null );
    assertProperty( "String", "Ein String", cm );
    // valueOf factories instead of the deprecated boxing constructors
    assertProperty( "Double", Double.valueOf( 2.5 ), cm );
    assertProperty( "Float", Float.valueOf( 2.5f ), cm );
    assertProperty( "Long", Long.valueOf( 2 ), cm );
    assertProperty( "Integer", Integer.valueOf( 2 ), cm );
    assertProperty( "Short", Short.valueOf( ( short ) 2 ), cm );
    assertProperty( "Byte", Byte.valueOf( ( byte ) 2 ), cm );
    assertProperty( "Boolean", Boolean.TRUE, cm );
    assertPropertyFail( "Double", "x", cm );
    assertPropertyFail( "Float", "x", cm );
    assertPropertyFail( "Long", "x", cm );
    assertPropertyFail( "Integer", "x", cm );
    assertPropertyFail( "Short", "x", cm );
    assertPropertyFail( "Byte", "x", cm );
}
public void test_property_array_non_character()
{
    // Every legal array property type must round-trip its single value, while
    // malformed numeric values must be rejected.
    final ComponentMetadata cm = createComponentMetadata( null, null );
    assertPropertyArray( "String", "Ein String", cm );
    // valueOf factories instead of the deprecated boxing constructors
    assertPropertyArray( "Double", Double.valueOf( 2.5 ), cm );
    assertPropertyArray( "Float", Float.valueOf( 2.5f ), cm );
    assertPropertyArray( "Long", Long.valueOf( 2 ), cm );
    assertPropertyArray( "Integer", Integer.valueOf( 2 ), cm );
    assertPropertyArray( "Short", Short.valueOf( ( short ) 2 ), cm );
    assertPropertyArray( "Byte", Byte.valueOf( ( byte ) 2 ), cm );
    assertPropertyArray( "Boolean", Boolean.TRUE, cm );
    assertPropertyArrayFail( "Double", "x", cm );
    assertPropertyArrayFail( "Float", "x", cm );
    assertPropertyArrayFail( "Long", "x", cm );
    assertPropertyArrayFail( "Integer", "x", cm );
    assertPropertyArrayFail( "Short", "x", cm );
    assertPropertyArrayFail( "Byte", "x", cm );
}
public void test_property_character_ds10()
{
    // "Character" is the DS 1.1 type name; DS 1.0 only knows "Char", so
    // validating the property against DS 1.0 metadata must fail.
    final ComponentMetadata meta = createComponentMetadata( null, null );
    try
    {
        createPropertyMetadata( "x", "Character", Integer.toString( 'x' ) ).validate( meta );
        fail( "Expect validation failure for illegal property type Character" );
    }
    catch ( ComponentException expected )
    {
        // expected: "Character" is not a valid DS 1.0 property type
    }
}
public void test_configuration_pid_use_ds12()
{
    // The configuration-pid attribute was introduced with DS 1.2: a DS 1.1
    // descriptor using it must be rejected, a DS 1.2 descriptor must pass.
    ComponentMetadata cm = createComponentMetadata11( null, null );
    try
    {
        cm.setConfigurationPid( new String[] {"configurationPid"} );
        cm.validate( );
        fail( "Expect validation failure for illegal configuration-pid usage in ds 1.1 namespace" );
    }
    catch ( ComponentException ce )
    {
        // expected: configuration-pid requires DS 1.2
    }
    cm = createComponentMetadata12( null, null );
    try
    {
        cm.setConfigurationPid( new String[] {"configurationPid"} );
        cm.validate( );
    }
    catch ( ComponentException ce )
    {
        // Report the cause in the assertion message rather than dumping a
        // stack trace to stderr (printStackTrace is test noise and the
        // failure reason was previously lost from the fail message).
        fail( "Expect correct validation for legal configuration-pid usage in ds 1.2 or later namespace: " + ce );
    }
}
public void test_get_configuration_pid_method()
{
    // Exercise the configuration-pid defaulting rules for every supported
    // namespace version.
    final DSVersion[] versions = { DSVersion.DS10, DSVersion.DS11, DSVersion.DS12 };
    for ( int i = 0; i < versions.length; i++ )
    {
        doTest_get_configuration_pid_method( versions[i] );
    }
}
private void doTest_get_configuration_pid_method(DSVersion specVersion)
{
    // Make sure that getConfigurationPid returns the default component name
    // (the implementation class name) when no name is specified. We only do
    // this for DS 1.1+, because in DS 1.0 the component name is mandatory.
    if ( specVersion.isDS11() )
    {
        ComponentMetadata cm = new ComponentMetadata( specVersion );
        try
        {
            cm.setImplementationClassName("implementation.class");
            cm.setName( null );
            cm.validate( );
        }
        catch ( ComponentException ce )
        {
            // include the cause instead of silently dropping it
            fail( "Expect correct validation for unnamed component: " + ce );
        }
        List<String> pid = cm.getConfigurationPid();
        assertFalse( "Expect non-null configuration pid when component name is not specified", pid.isEmpty() );
        assertEquals( "Expect configuration-pid to be equals to component implementation",
            "implementation.class", pid.get( 0 ) );
    }
    // Make sure that getConfigurationPid returns the name of the component, if specified
    ComponentMetadata cm = new ComponentMetadata( specVersion );
    try
    {
        cm.setImplementationClassName("implementation.class");
        cm.setName("my.component.name");
        cm.validate( );
    }
    catch ( ComponentException ce )
    {
        // include the cause instead of silently dropping it
        fail( "Expect correct validation for named component: " + ce );
    }
    List<String> pid = cm.getConfigurationPid();
    assertFalse( "Expect non-null configuration pid when component name is not specified", pid.isEmpty() );
    assertEquals( "Expect configuration-pid to be equals to component name",
        "my.component.name", pid.get( 0 ) );
}
public void test_property_character_ds11() throws ComponentException
{
    // DS 1.1 accepts the spec-compliant type name "Character"; the numeric
    // character code must convert to a Character during validation.
    final ComponentMetadata cm = createComponentMetadata11( null, null );
    PropertyMetadata prop = createPropertyMetadata( "x", "Character", Integer.toString( 'x' ) );
    cm.addProperty( prop );
    cm.validate( );
    assertTrue( prop.getValue() instanceof Character );
    // Character.valueOf instead of the deprecated Character(char) constructor
    assertEquals( Character.valueOf( 'x' ), prop.getValue() );
}
//---------- Helper methods
// method also used by XmlHandlerTest
// method also used by XmlHandlerTest
/**
 * Validates the given metadata expecting failure and asserts that the
 * validation message mentions the expected reason.
 */
static void failDS10Validation( final ComponentMetadata metadata, final String expectedValidationReason )
{
    try
    {
        metadata.validate( );
        fail( "Expected validation failure for Component " + metadata.getName() + " containing '"
            + expectedValidationReason + "'" );
    }
    catch ( ComponentException ce )
    {
        // String.valueOf guards against a null exception message, which would
        // previously have thrown NullPointerException on indexOf instead of
        // reporting a readable assertion failure.
        final String message = String.valueOf( ce.getMessage() );
        assertTrue(
            "Expected validation reason to contain '" + expectedValidationReason + "': actual: " + message,
            message.contains( expectedValidationReason ) );
    }
}
// Creates Component Metadata for the given namespace version, pre-populated
// with the mandatory name and implementation class. The immediate flag and
// factory identifier are only applied when supplied.
private ComponentMetadata createComponentMetadata( DSVersion dsVersion, Boolean immediate, String factory )
{
    final ComponentMetadata metadata = new ComponentMetadata( dsVersion );
    metadata.setName( "place.holder" );
    metadata.setImplementationClassName( "place.holder.implementation" );
    if ( immediate != null )
    {
        metadata.setImmediate( immediate.booleanValue() );
    }
    if ( factory != null )
    {
        metadata.setFactoryIdentifier( factory );
    }
    return metadata;
}
// Convenience factory for DS 1.0 Component Metadata
private ComponentMetadata createComponentMetadata( Boolean immediate, String factory )
{
    return createComponentMetadata( DSVersion.DS10, immediate, factory );
}
// Convenience factory for DS 1.1 Component Metadata
private ComponentMetadata createComponentMetadata11( Boolean immediate, String factory )
{
    return createComponentMetadata( DSVersion.DS11, immediate, factory );
}
// Convenience factory for DS 1.2 Component Metadata
private ComponentMetadata createComponentMetadata12( Boolean immediate, String factory )
{
    return createComponentMetadata( DSVersion.DS12, immediate, factory );
}
// Creates service metadata providing one placeholder interface; the
// servicefactory flag is only applied when supplied.
private ServiceMetadata createServiceMetadata( Boolean serviceFactory )
{
    final ServiceMetadata service = new ServiceMetadata();
    service.addProvide( "place.holder.service" );
    if ( serviceFactory != null )
    {
        service.setServiceFactory( serviceFactory.booleanValue() );
    }
    return service;
}
// Creates reference metadata for a placeholder interface; the name may be
// null to exercise the name-defaulting rules.
private ReferenceMetadata createReferenceMetadata( String name )
{
    final ReferenceMetadata reference = new ReferenceMetadata();
    reference.setName( name );
    reference.setInterface( "place.holder" );
    return reference;
}
// Creates property metadata; only non-null attributes are applied, so
// callers can leave any attribute unset to provoke validation failures.
private PropertyMetadata createPropertyMetadata( String propertyName, String type, String value )
{
    final PropertyMetadata prop = new PropertyMetadata();
    if ( propertyName != null )
    {
        prop.setName( propertyName );
    }
    if ( type != null )
    {
        prop.setType( type );
    }
    if ( value != null )
    {
        prop.setValue( value );
    }
    return prop;
}
// Asserts that a scalar property of the given type converts its string
// value to the matching box type with the expected value.
private void assertProperty( String type, Object value, ComponentMetadata cmeta )
{
    final PropertyMetadata prop = createPropertyMetadata( "dummy", type, String.valueOf( value ) );
    prop.validate( cmeta );
    assertSame( value.getClass(), prop.getValue().getClass() );
    assertEquals( value, prop.getValue() );
}
// Asserts that a multi-value property of the given type converts to a
// one-element array of the primitive (or String) component type.
private void assertPropertyArray( String type, Object value, ComponentMetadata cmeta )
{
    final PropertyMetadata prop = createPropertyMetadata( "dummy", type, null );
    prop.setValues( String.valueOf( value ) );
    prop.validate( cmeta );
    final Object propertyValue = prop.getValue();
    assertTrue( propertyValue.getClass().isArray() );
    assertPrimitiveType( value.getClass(), propertyValue.getClass().getComponentType() );
    assertEquals( 1, Array.getLength( propertyValue ) );
    assertEquals( value, Array.get( propertyValue, 0 ) );
}
// Asserts that validating a scalar property of the given type with the
// given (malformed) value fails.
private void assertPropertyFail( String type, String value, ComponentMetadata cmeta )
{
    try
    {
        createPropertyMetadata( "dummy", type, value ).validate( cmeta );
        fail( "Expected validation failure for " + type + "=" + value );
    }
    catch ( ComponentException expected )
    {
        // expected: value cannot be converted to the declared type
    }
}
// Asserts that validating a multi-value property of the given type with
// the given (malformed) value fails.
private void assertPropertyArrayFail( String type, String value, ComponentMetadata cmeta )
{
    try
    {
        final PropertyMetadata prop = createPropertyMetadata( "dummy", type, null );
        prop.setValues( value );
        prop.validate( cmeta );
        fail( "Expected validation failure for " + type + "=" + value );
    }
    catch ( ComponentException expected )
    {
        // expected: value cannot be converted to the declared type
    }
}
// Asserts that the actual array component type matches the one expected for
// the given box class: String stays String, every other box class unboxes
// to its primitive TYPE; anything else is an unexpected box class.
private void assertPrimitiveType(final Class<?> expectedBoxClass,
    final Class<?> actualClass)
{
    final Class<?>[][] boxToComponent = {
        { String.class, String.class },
        { Double.class, Double.TYPE },
        { Float.class, Float.TYPE },
        { Long.class, Long.TYPE },
        { Integer.class, Integer.TYPE },
        { Short.class, Short.TYPE },
        { Byte.class, Byte.TYPE },
        { Boolean.class, Boolean.TYPE },
    };
    for ( int i = 0; i < boxToComponent.length; i++ )
    {
        if ( boxToComponent[i][0] == expectedBoxClass )
        {
            assertEquals( boxToComponent[i][1], actualClass );
            return;
        }
    }
    fail( "Unexpected box class " + expectedBoxClass );
}
}
|
apache/hive | 35,729 | ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
/**
* TestGenericUDFTrunc.
*/
public class TestGenericUDFTrunc {
@Test
public void testStringToDateWithMonthFormat() throws HiveException {
  // trunc(<date-or-timestamp-string>, 'MONTH') must floor the input to the
  // first day of its month. Replaces the copy-pasted call sequence with a
  // data-driven { input, expected } table.
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("MONTH"));

  // plain date strings first, then timestamp strings
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-02-01" },
      { "2014-02-28", "2014-02-01" },
      { "2016-02-03", "2016-02-01" },
      { "2016-02-28", "2016-02-01" },
      { "2016-02-29", "2016-02-01" },
      { "2014-01-01 10:30:45", "2014-01-01" },
      { "2014-01-14 10:30:45", "2014-01-01" },
      { "2014-01-31 10:30:45", "2014-01-01" },
      { "2014-02-02 10:30:45", "2014-02-01" },
      { "2014-02-28 10:30:45", "2014-02-01" },
      { "2016-02-03 10:30:45", "2016-02-01" },
      { "2016-02-28 10:30:45", "2016-02-01" },
      { "2016-02-29 10:30:45", "2016-02-01" },
  };
  for (String[] c : cases) {
    DeferredObject valueObj0 = new DeferredJavaObject(new Text(c[0]));
    DeferredObject[] evalArgs = { valueObj0, valueObjFmt };
    runAndVerify(c[1], udf, initArgs, evalArgs);
  }
}
@Test
public void testStringToDateWithQuarterFormat() throws HiveException {
  // trunc(<date-or-timestamp-string>, 'QUARTER') must floor the input to the
  // first day of its quarter. Replaces the copy-pasted call sequence with a
  // data-driven { input, expected } table.
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("QUARTER"));

  // plain date strings first, then timestamp strings
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-01-01" },
      { "2014-02-28", "2014-01-01" },
      { "2016-02-03", "2016-01-01" },
      { "2016-02-28", "2016-01-01" },
      { "2016-02-29", "2016-01-01" },
      { "2016-05-11", "2016-04-01" },
      { "2016-07-01", "2016-07-01" },
      { "2016-12-31", "2016-10-01" },
      { "2014-01-01 10:30:45", "2014-01-01" },
      { "2014-01-14 10:30:45", "2014-01-01" },
      { "2014-01-31 10:30:45", "2014-01-01" },
      { "2014-02-02 10:30:45", "2014-01-01" },
      { "2014-02-28 10:30:45", "2014-01-01" },
      { "2016-02-03 10:30:45", "2016-01-01" },
      { "2016-02-28 10:30:45", "2016-01-01" },
      { "2016-02-29 10:30:45", "2016-01-01" },
      { "2016-05-11 10:30:45", "2016-04-01" },
      { "2016-07-01 10:30:45", "2016-07-01" },
      { "2016-12-31 10:30:45", "2016-10-01" },
  };
  for (String[] c : cases) {
    DeferredObject valueObj0 = new DeferredJavaObject(new Text(c[0]));
    DeferredObject[] evalArgs = { valueObj0, valueObjFmt };
    runAndVerify(c[1], udf, initArgs, evalArgs);
  }
}
@Test
public void testStringToDateWithYearFormat() throws HiveException {
  // trunc(<date-or-timestamp-string>, 'YEAR') must floor the input to
  // January 1st of its year. Replaces the copy-pasted call sequence with a
  // data-driven { input, expected } table.
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("YEAR"));

  // plain date strings first, then timestamp strings
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-01-01" },
      { "2014-02-28", "2014-01-01" },
      { "2016-02-03", "2016-01-01" },
      { "2016-02-28", "2016-01-01" },
      { "2016-02-29", "2016-01-01" },
      { "2014-01-01 10:30:45", "2014-01-01" },
      { "2014-01-14 10:30:45", "2014-01-01" },
      { "2014-01-31 10:30:45", "2014-01-01" },
      { "2014-02-02 10:30:45", "2014-01-01" },
      { "2014-02-28 10:30:45", "2014-01-01" },
      { "2016-02-03 10:30:45", "2016-01-01" },
      { "2016-02-28 10:30:45", "2016-01-01" },
      { "2016-02-29 10:30:45", "2016-01-01" },
  };
  for (String[] c : cases) {
    DeferredObject valueObj0 = new DeferredJavaObject(new Text(c[0]));
    DeferredObject[] evalArgs = { valueObj0, valueObjFmt };
    runAndVerify(c[1], udf, initArgs, evalArgs);
  }
}
@Test
public void testTimestampToDateWithMonthFormat() throws HiveException {
  // trunc(<timestamp>, 'MON') must floor the input to the first day of its
  // month. Replaces the copy-pasted call sequence with a data-driven
  // { input, expected } table; inputs are parsed into TimestampWritableV2.
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("MON"));

  // midnight timestamps first, then timestamps with a time-of-day component
  String[][] cases = {
      { "2014-01-01 00:00:00", "2014-01-01" },
      { "2014-01-14 00:00:00", "2014-01-01" },
      { "2014-01-31 00:00:00", "2014-01-01" },
      { "2014-02-02 00:00:00", "2014-02-01" },
      { "2014-02-28 00:00:00", "2014-02-01" },
      { "2016-02-03 00:00:00", "2016-02-01" },
      { "2016-02-28 00:00:00", "2016-02-01" },
      { "2016-02-29 00:00:00", "2016-02-01" },
      { "2014-01-01 10:30:45", "2014-01-01" },
      { "2014-01-14 10:30:45", "2014-01-01" },
      { "2014-01-31 10:30:45", "2014-01-01" },
      { "2014-02-02 10:30:45", "2014-02-01" },
      { "2014-02-28 10:30:45", "2014-02-01" },
      { "2016-02-03 10:30:45", "2016-02-01" },
      { "2016-02-28 10:30:45", "2016-02-01" },
      { "2016-02-29 10:30:45", "2016-02-01" },
  };
  for (String[] c : cases) {
    DeferredObject valueObj0 =
        new DeferredJavaObject(new TimestampWritableV2(Timestamp.valueOf(c[0])));
    DeferredObject[] evalArgs = { valueObj0, valueObjFmt };
    runAndVerify(c[1], udf, initArgs, evalArgs);
  }
}
/**
 * Tests TRUNC on timestamp input with the quarter ("Q") format: the result
 * must be the first day of the quarter containing the input timestamp,
 * regardless of the time-of-day component.
 */
@Test
public void testTimestampToDateWithQuarterFormat() throws HiveException {
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("Q"));
  // Each row holds {input date, expected first day of its quarter}.
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-01-01" },
      { "2014-02-28", "2014-01-01" },
      { "2016-02-03", "2016-01-01" },
      { "2016-02-28", "2016-01-01" },
      { "2016-02-29", "2016-01-01" },
      { "2016-05-11", "2016-04-01" },
      { "2016-07-01", "2016-07-01" },
      { "2016-12-31", "2016-10-01" } };
  // Midnight exercises plain dates; 10:30:45 proves the time part is dropped.
  for (String time : new String[] { "00:00:00", "10:30:45" }) {
    for (String[] tc : cases) {
      DeferredObject valueObj0 = new DeferredJavaObject(
          new TimestampWritableV2(Timestamp.valueOf(tc[0] + " " + time)));
      DeferredObject[] evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
      runAndVerify(tc[1], udf, initArgs, evalArgs);
    }
  }
}
/**
 * Tests TRUNC on timestamp input with the year ("YYYY") format: the result
 * must be January 1st of the input timestamp's year, regardless of the
 * time-of-day component.
 */
@Test
public void testTimestampToDateWithYearFormat() throws HiveException {
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("YYYY"));
  // Each row holds {input date, expected January 1st of its year}.
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-01-01" },
      { "2014-02-28", "2014-01-01" },
      { "2016-02-03", "2016-01-01" },
      { "2016-02-28", "2016-01-01" },
      { "2016-02-29", "2016-01-01" } };
  // Midnight exercises plain dates; 10:30:45 proves the time part is dropped.
  for (String time : new String[] { "00:00:00", "10:30:45" }) {
    for (String[] tc : cases) {
      DeferredObject valueObj0 = new DeferredJavaObject(
          new TimestampWritableV2(Timestamp.valueOf(tc[0] + " " + time)));
      DeferredObject[] evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
      runAndVerify(tc[1], udf, initArgs, evalArgs);
    }
  }
}
/**
 * Tests TRUNC on date input with the month ("MM") format: the result must
 * be the first day of the month of the input date.
 */
@Test
public void testDateWritableToDateWithMonthFormat() throws HiveException {
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("MM"));
  // Each row holds {input date, expected first day of its month}.
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-02-01" },
      { "2014-02-28", "2014-02-01" },
      { "2016-02-03", "2016-02-01" },
      { "2016-02-28", "2016-02-01" },
      { "2016-02-29", "2016-02-01" } };
  for (String[] tc : cases) {
    DeferredObject valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf(tc[0])));
    DeferredObject[] evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
    runAndVerify(tc[1], udf, initArgs, evalArgs);
  }
}
/**
 * Tests TRUNC on date input with the quarter ("Q") format: the result must
 * be the first day of the quarter containing the input date.
 */
@Test
public void testDateWritableToDateWithQuarterFormat() throws HiveException {
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("Q"));
  // Each row holds {input date, expected first day of its quarter}.
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-01-01" },
      { "2014-02-28", "2014-01-01" },
      { "2016-02-03", "2016-01-01" },
      { "2016-02-28", "2016-01-01" },
      { "2016-02-29", "2016-01-01" },
      { "2016-05-11", "2016-04-01" },
      { "2016-07-01", "2016-07-01" },
      { "2016-12-31", "2016-10-01" } };
  for (String[] tc : cases) {
    DeferredObject valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf(tc[0])));
    DeferredObject[] evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
    runAndVerify(tc[1], udf, initArgs, evalArgs);
  }
}
/**
 * Tests TRUNC on date input with the year ("YY") format: the result must be
 * January 1st of the input date's year.
 */
@Test
public void testDateWritableToDateWithYearFormat() throws HiveException {
  GenericUDFTrunc udf = new GenericUDFTrunc();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArgs = { valueOI0, valueOI1 };
  DeferredObject valueObjFmt = new DeferredJavaObject(new Text("YY"));
  // Each row holds {input date, expected January 1st of its year}.
  String[][] cases = {
      { "2014-01-01", "2014-01-01" },
      { "2014-01-14", "2014-01-01" },
      { "2014-01-31", "2014-01-01" },
      { "2014-02-02", "2014-01-01" },
      { "2014-02-28", "2014-01-01" },
      { "2016-02-03", "2016-01-01" },
      { "2016-02-28", "2016-01-01" },
      { "2016-02-29", "2016-01-01" } };
  for (String[] tc : cases) {
    DeferredObject valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf(tc[0])));
    DeferredObject[] evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
    runAndVerify(tc[1], udf, initArgs, evalArgs);
  }
}
/**
 * Initializes {@code udf} with {@code initArgs}, evaluates it against
 * {@code evalArgs} and asserts that the textual result equals
 * {@code expResult}.
 */
private void runAndVerify(String expResult, GenericUDF udf, ObjectInspector[] initArgs,
    DeferredObject[] evalArgs) throws HiveException {
  udf.initialize(initArgs);
  Text output = (Text) udf.evaluate(evalArgs);
  assertEquals("first_day() test ", expResult, output.toString());
}
}
|
apache/jackrabbit-filevault | 35,683 | vault-core-it/vault-core-integration-tests/src/main/java/org/apache/jackrabbit/vault/packaging/integration/PackageInstallIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.vault.packaging.integration;
import static org.apache.jackrabbit.vault.packaging.JcrPackageDefinition.PN_DEPENDENCIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.util.Collections;
import java.util.Properties;
import javax.jcr.NodeIterator;
import javax.jcr.Property;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import javax.jcr.Value;
import javax.jcr.nodetype.NodeType;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils;
import org.apache.jackrabbit.vault.fs.api.IdConflictPolicy;
import org.apache.jackrabbit.vault.fs.config.ConfigurationException;
import org.apache.jackrabbit.vault.fs.config.DefaultMetaInf;
import org.apache.jackrabbit.vault.fs.io.ImportOptions;
import org.apache.jackrabbit.vault.packaging.Dependency;
import org.apache.jackrabbit.vault.packaging.ExportOptions;
import org.apache.jackrabbit.vault.packaging.InstallContext;
import org.apache.jackrabbit.vault.packaging.JcrPackage;
import org.apache.jackrabbit.vault.packaging.PackageException;
import org.apache.jackrabbit.vault.packaging.PackageId;
import org.apache.jackrabbit.vault.packaging.VaultPackage;
import org.apache.jackrabbit.vault.packaging.events.impl.PackageEventDispatcherImpl;
import org.apache.jackrabbit.vault.packaging.impl.ActivityLog;
import org.apache.jackrabbit.vault.packaging.impl.JcrPackageManagerImpl;
import org.apache.jackrabbit.vault.packaging.registry.impl.JcrPackageRegistry;
import org.apache.jackrabbit.vault.packaging.registry.impl.JcrRegisteredPackage;
import org.junit.Assume;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
/**
 * Integration tests covering upload, installation, extraction, rewrap and
 * install-hook handling of JCR packages via the {@code JcrPackageManager}.
 */
public class PackageInstallIT extends IntegrationTestBase {
@Rule
public ExpectedException thrown = ExpectedException.none();
/**
 * Uploads the simple "tmp" test package and verifies that the package node
 * and its unwrapped vlt:definition are created in the repository.
 */
@Test
public void testUpload() throws RepositoryException, IOException, PackageException {
    JcrPackage uploaded = packMgr.upload(getStream("/test-packages/tmp.zip"), false);
    assertNotNull(uploaded);
    assertPackageNodeExists(TMP_PACKAGE_ID);
    // upload unwraps the package, so the definition node must already exist
    assertNodeExists(getInstallationPath(TMP_PACKAGE_ID) + "/jcr:content/vlt:definition");
    // todo: check definition props
}
/**
 * Checks that a small uploaded package can be installed and then rewrapped.
 */
@Test
public void testRewrap() throws RepositoryException, IOException, PackageException {
    JcrPackage uploaded = packMgr.upload(getStream("/test-packages/tmp.zip"), false);
    assertNotNull(uploaded);
    ImportOptions options = getDefaultOptions();
    uploaded.install(options);
    packMgr.rewrap(uploaded, options.getListener());
}
/**
 * Checks that rewrap also works for a package backed by a non-zip
 * (folder based) archive.
 */
@Test
public void testRewrapOnNonZipBasedArchive() throws RepositoryException, IOException, PackageException {
    try (VaultPackage original = loadVaultPackage("/test-packages/tmp.zip")) {
        Properties props = new Properties();
        props.setProperty(VaultPackage.NAME_GROUP, "jackrabbit/test");
        props.setProperty(VaultPackage.NAME_NAME, "rewrapped-package");
        DefaultMetaInf metaInf = new DefaultMetaInf();
        metaInf.setProperties(props);
        ExportOptions exportOptions = new ExportOptions();
        exportOptions.setMetaInf(metaInf);
        try (VaultPackage rewrapped = packMgr.rewrap(exportOptions, original, (File) null)) {
            assertNotNull(rewrapped);
        }
    }
}
/**
 * Tests that re-uploading an already installed package (with an identical
 * created date) preserves the installation status and lastUnpacked
 * timestamp, while a package with a different created date does not.
 */
@Test
public void testUploadPreserveInstallStatus() throws RepositoryException, IOException, PackageException {
    JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp.zip"), true, true);
    assertNotNull(pack);
    assertTrue(pack.isValid());
    assertPackageNodeExists(TMP_PACKAGE_ID);
    pack.install(getDefaultOptions());
    assertNodeExists("/tmp/foo");
    // remember when the first installation happened
    long lastUnpacked = pack.getDefinition().getLastUnpacked().getTimeInMillis();
    assertTrue(lastUnpacked > 0);
    // now upload again, but don't install
    pack = packMgr.upload(getStream("/test-packages/tmp.zip"), true, true);
    assertNotNull(pack);
    PackageId pkgId = pack.getDefinition().getId();
    assertTrue(pack.isValid());
    // installation status must be kept
    assertTrue(pack.isInstalled());
    assertEquals(lastUnpacked, pack.getDefinition().getLastUnpacked().getTimeInMillis());
    // now re-acquire package and test again
    pack = packMgr.open(pkgId);
    assertTrue(pack.isValid());
    assertTrue(pack.isInstalled());
    assertEquals(lastUnpacked, pack.getDefinition().getLastUnpacked().getTimeInMillis());
    // a package with a different created date should not preserve the status!
    pack = packMgr.upload(getStream("/test-packages/tmp_with_modified_created_date.zip"), true, true);
    assertNotNull(pack);
    assertTrue(pack.isValid());
    assertFalse(pack.isInstalled());
}
/**
 * Tests that re-uploading an already installed SNAPSHOT version package
 * resets its installation status.
 */
@Test
public void testUploadDropInstallStatusForSnapshots() throws RepositoryException, IOException, PackageException {
    JcrPackage snapshotPack = packMgr.upload(getStream("/test-packages/simple-snapshot-version.zip"), true, true);
    assertNotNull(snapshotPack);
    assertTrue(snapshotPack.isValid());
    snapshotPack.install(getDefaultOptions());
    assertTrue(snapshotPack.isInstalled());
    long lastUnpacked = snapshotPack.getDefinition().getLastUnpacked().getTimeInMillis();
    assertTrue(lastUnpacked > 0);
    // upload the very same snapshot again without installing it
    snapshotPack = packMgr.upload(getStream("/test-packages/simple-snapshot-version.zip"), true, true);
    assertNotNull(snapshotPack);
    assertTrue(snapshotPack.isValid());
    // for snapshots the installation status must NOT be preserved
    assertFalse(snapshotPack.isInstalled());
}
/**
 * Installs a package that contains a thumbnail and checks that the
 * thumbnail ends up below the unwrapped package definition.
 */
@Test
public void testUploadWithThumbnail() throws RepositoryException, IOException, PackageException {
    JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp_with_thumbnail.zip"), false);
    assertNotNull(pack);
    assertPackageNodeExists(TMP_PACKAGE_ID);
    // upload already unwraps it, so check if definition is ok
    assertNodeExists(getInstallationPath(TMP_PACKAGE_ID) + "/jcr:content/vlt:definition/thumbnail.png");
}
/**
 * Extracts a package containing /tmp/fullcoverage/a/aa with a
 * vlt:FullCoverage mixin and verifies that the package manager registered
 * that node type even though the package does not ship it.
 */
@Test
public void testFullCoverageNT() throws RepositoryException, IOException, PackageException {
    JcrPackage fullCoveragePack = packMgr.upload(getStream("/test-packages/fullcoverage.zip"), false);
    assertNotNull(fullCoveragePack);
    // extract only - no snapshots needed here
    fullCoveragePack.extract(getDefaultOptions());
    assertNodeExists("/tmp/fullcoverage/a/aa");
    // must not throw: the node type was registered during extraction
    admin.getWorkspace().getNodeTypeManager().getNodeType("vlt:FullCoverage");
}
/**
 * Extracts a package where a folder sits below a jcr:resource node whose
 * mixin permits such a child, and verifies the deep node was imported.
 */
@Test
public void testDeepMixin() throws RepositoryException, IOException, PackageException {
    JcrPackage deepMixinPack = packMgr.upload(getStream("/test-packages/deepmixintest.zip"), false);
    assertNotNull(deepMixinPack);
    // extract only - no snapshots needed here
    deepMixinPack.extract(getDefaultOptions());
    assertNodeExists("/etc/designs/apache/images/backgroundImage.png/jcr:content/dam:thumbnails/dam:thumbnail_48.png");
}
/**
 * Extracts a package whose filter targets a jcr:content[nt:unstructured]
 * node (see bug #42562) and verifies the content below it was imported.
 */
@Test
public void testJcrContent() throws RepositoryException, IOException, PackageException {
    JcrPackage jcrContentPack = packMgr.upload(getStream("/test-packages/tmp_testpage_jcr_content.zip"), false);
    assertNotNull(jcrContentPack);
    // extract only - no snapshots needed here
    jcrContentPack.extract(getDefaultOptions());
    assertNodeExists("/tmp/testpage/jcr:content/foo");
}
/**
 * Extracts a package that only adds a property to the root node and checks
 * that the property is present afterwards.
 */
@Test
public void testRootImport() throws RepositoryException, IOException, PackageException {
    JcrPackage rootPack = packMgr.upload(getStream("/test-packages/testrootimport.zip"), false);
    assertNotNull(rootPack);
    // extract only - no snapshots needed here
    rootPack.extract(getDefaultOptions());
    assertProperty("/testproperty", "hello");
}
/**
 * Installs a package with an install hook and verifies the hook left its
 * marker property on the prepared test content.
 */
@Test
public void testHook() throws RepositoryException, IOException, PackageException {
    // start from a clean, well-known test structure
    if (admin.nodeExists("/testroot")) {
        admin.getNode("/testroot").remove();
    }
    admin.getRootNode().addNode("testroot", "nt:unstructured").addNode("testnode", "nt:unstructured");
    admin.save();
    JcrPackage hookPack = packMgr.upload(getStream("/test-packages/test_hook.zip"), false);
    assertNotNull(hookPack);
    hookPack.install(getDefaultOptions());
    assertTrue(admin.propertyExists("/testroot/hook-example"));
}
/**
 * Installs a package with an install hook as a non-admin user that is not
 * on the hook allow-list: the installation must be rejected with a
 * {@link PackageException}, even though the user has full ACL permissions.
 */
@Test
public void testHookWithNotAllowedNonAdminUser() throws RepositoryException, IOException, PackageException {
    if (admin.nodeExists("/testroot")) {
        admin.getNode("/testroot").remove();
    }
    admin.getRootNode().addNode("testroot", "nt:unstructured").addNode("testnode", "nt:unstructured");
    admin.save();
    // Create test user
    UserManager userManager = ((JackrabbitSession)admin).getUserManager();
    String userId = "user1";
    String userPwd = "pwd1";
    User user1 = userManager.createUser(userId, userPwd);
    Principal principal1 = user1.getPrincipal();
    // Grant broad permissions so the rejection cannot be caused by missing ACLs
    AccessControlUtils.addAccessControlEntry(admin, null, principal1, new String[]{"jcr:namespaceManagement","jcr:nodeTypeDefinitionManagement"}, true);
    AccessControlUtils.addAccessControlEntry(admin, "/", principal1, new String[]{"jcr:all"}, true);
    admin.save();
    Session userSession = repository.login(new SimpleCredentials(userId, userPwd.toCharArray()));
    try {
        // package manager bound to the non-admin session, with an empty hook allow-list
        packMgr = new JcrPackageManagerImpl(userSession, new String[0]);
        PackageEventDispatcherImpl dispatcher = new PackageEventDispatcherImpl();
        dispatcher.bindPackageEventListener(new ActivityLog(), Collections.singletonMap("component.id", (Object) "1234"));
        packMgr.setDispatcher(dispatcher);
        JcrPackage pack = packMgr.upload(getStream("/test-packages/test_hook.zip"), false);
        assertNotNull(pack);
        // the following installPackage call must fail with exactly this message
        thrown.expect(PackageException.class);
        thrown.expectMessage("Package extraction requires admin session as it has a hook");
        packMgr.getInternalRegistry().installPackage(userSession, new JcrRegisteredPackage(pack), getDefaultOptions(), true);
    } finally {
        userSession.logout();
    }
}
/**
 * Installs a package with an install hook as a non-admin user that is
 * explicitly on the hook allow-list: the installation must succeed and the
 * hook must have run.
 */
@Test
public void testHookWithAllowedNonAdminUser() throws RepositoryException, IOException, PackageException {
    if (admin.nodeExists("/testroot")) {
        admin.getNode("/testroot").remove();
    }
    admin.getRootNode().addNode("testroot", "nt:unstructured").addNode("testnode", "nt:unstructured");
    admin.save();
    // Create test user
    UserManager userManager = ((JackrabbitSession)admin).getUserManager();
    String userId = "user1";
    String userPwd = "pwd1";
    User user1 = userManager.createUser(userId, userPwd);
    Principal principal1 = user1.getPrincipal();
    // Grant broad permissions so hook execution is gated only by the allow-list
    AccessControlUtils.addAccessControlEntry(admin, null, principal1, new String[]{"jcr:namespaceManagement","jcr:nodeTypeDefinitionManagement"}, true);
    AccessControlUtils.addAccessControlEntry(admin, "/", principal1, new String[]{"jcr:all"}, true);
    admin.save();
    Session userSession = repository.login(new SimpleCredentials(userId, userPwd.toCharArray()));
    try {
        // "user1" is passed as a hook-allowed user to the package manager
        packMgr = new JcrPackageManagerImpl(userSession, new String[0], new String[] {"user1"}, null, false, true, IdConflictPolicy.FAIL);
        PackageEventDispatcherImpl dispatcher = new PackageEventDispatcherImpl();
        dispatcher.bindPackageEventListener(new ActivityLog(), Collections.singletonMap("component.id", (Object) "1234"));
        packMgr.setDispatcher(dispatcher);
        JcrPackage pack = packMgr.upload(getStream("/test-packages/test_hook.zip"), false);
        assertNotNull(pack);
        packMgr.getInternalRegistry().installPackage(userSession, new JcrRegisteredPackage(pack), getDefaultOptions(), true);
        assertTrue(admin.propertyExists("/testroot/hook-example"));
    } finally {
        userSession.logout();
    }
}
/**
 * Installs a hook package whose required content is missing so the hook
 * itself fails; the installation must then fail with a PackageException.
 */
@Test
public void testHookFail() throws RepositoryException, IOException, PackageException {
    // make sure the node the hook relies on does NOT exist
    if (admin.nodeExists("/testroot")) {
        admin.getNode("/testroot").remove();
    }
    admin.save();
    JcrPackage hookPack = packMgr.upload(getStream("/test-packages/test_hook.zip"), false);
    assertNotNull(hookPack);
    try {
        hookPack.install(getDefaultOptions());
        fail("installing failing hook should fail");
    } catch (PackageException e) {
        // expected
    }
}
/**
 * Installing a package that ships a broken install hook must fail with a
 * PackageException.
 */
@Test
public void testInvalidHook() throws RepositoryException, IOException {
    JcrPackage invalidHookPack = packMgr.upload(getStream("/test-packages/invalid_hook.zip"), false);
    assertNotNull(invalidHookPack);
    try {
        invalidHookPack.install(getDefaultOptions());
        fail("Package install should fail.");
    } catch (PackageException e) {
        // expected
    }
}
/**
 * Installs a package with two external hooks and verifies that both hooks
 * were driven through to the END phase.
 */
@Test
public void testExternalHook() throws RepositoryException, IOException, PackageException {
    // the hooks write below /testroot, so make sure it exists
    if (!admin.nodeExists("/testroot")) {
        admin.getRootNode().addNode("testroot", "nt:unstructured");
        admin.save();
    }
    JcrPackage externalHookPack = packMgr.upload(getStream("/test-packages/external_hook.zip"), false);
    assertNotNull(externalHookPack);
    externalHookPack.install(getDefaultOptions());
    assertProperty("/testroot/TestHook1", InstallContext.Phase.END.toString());
    assertProperty("/testroot/TestHook2", InstallContext.Phase.END.toString());
}
/**
 * Installs a package whose external hook throws in the INSTALLED phase: the
 * install reports a PackageException, but because the failure happens after
 * extraction the package content must still be present.
 */
@Test
public void testExternalHookFailsInInstalledPhase() throws RepositoryException, IOException, PackageException {
    try {
        extractVaultPackage("/test-packages/external_hook_failing_in_installed_phase.zip");
        fail("Package install should fail due to installhook exception.");
    } catch (PackageException e) {
        // expected
    }
    // the content was already extracted before the hook failed
    assertNodeExists("/testroot");
}
/**
 * A package without a properties.xml can still be uploaded (under an
 * explicitly given name) and installed.
 */
@Test
public void testNoProperties() throws RepositoryException, IOException, PackageException {
    try (JcrPackage noPropsPack = packMgr.upload(getFile("/test-packages/tmp_no_properties.zip"), false, true, "tmp_no_properties",
            false)) {
        assertNotNull(noPropsPack);
        noPropsPack.install(getDefaultOptions());
    }
}
/**
* Installs a package with non-child filter doesn't remove the root.
*
* <pre>
* <workspaceFilter version="1.0">
* <filter root="/etc">
* <include pattern="/etc"/>
* <include pattern="/etc/clientlibs"/>
* <include pattern="/etc/clientlibs/granite"/>
* <include pattern="/etc/clientlibs/granite/test(/.*)?"/>
* </filter>
* </workspaceFilter>
*/
@Test
public void testNoChildFilter() throws RepositoryException, IOException, PackageException {
try (JcrPackage pack = packMgr.upload(getFile("/test-packages/test-package-with-etc.zip"), false, true,
"test-package-with-etc", false)) {
assertNodeExists("/etc");
admin.getNode("/etc").addNode("foo", NodeType.NT_FOLDER);
admin.save();
pack.install(getDefaultOptions());
assertNodeExists("/etc/foo");
}
}
    /**
     * Installs a package with deeply nested docview content and verifies that
     * all intermediate and leaf nodes were created.
     */
    @Test
    public void testDeepContentImport() throws IOException, RepositoryException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp_test_deep.zip"), false);
        assertNotNull(pack);
        pack.install(getDefaultOptions());
        assertNodeExists("/tmp/test/content/foo/jcr:content/a/b/foo.jsp/jcr:content");
        assertNodeExists("/tmp/test/content/foo/jcr:content/a/c/resource");
        assertNodeExists("/tmp/test/content/foo/jcr:content/a/d");
        assertNodeExists("/tmp/test/content/foo/jcr:content/a/folder/file.txt/jcr:content");
    }
/**
* installs a package that contains a node with childnode ordering and full-coverage sub nodes.
* see JCRVLT-24
*/
@Test
public void testChildNodeOrder() throws IOException, RepositoryException, PackageException {
JcrPackage pack = packMgr.upload(getStream("/test-packages/test_childnodeorder.zip"), false);
assertNotNull(pack);
pack.install(getDefaultOptions());
assertNodeExists("/tmp/ordertest/test/rail/items/modes/items");
NodeIterator iter = admin.getNode("/tmp/ordertest/test/rail/items/modes/items").getNodes();
StringBuilder names = new StringBuilder();
while (iter.hasNext()) {
names.append(iter.nextNode().getName()).append(",");
}
assertEquals("child order", "a,d,b,c,", names.toString());
}
/**
* installs a package that contains a node with childnode ordering and full-coverage sub nodes.
* see JCRVLT-44
*/
@Test
public void testChildNodeOrder2() throws IOException, RepositoryException, PackageException {
JcrPackage pack = packMgr.upload(getStream("/test-packages/test_childnodeorder2.zip"), false);
assertNotNull(pack);
pack.install(getDefaultOptions());
assertNodeExists("/tmp/test/en");
NodeIterator iter = admin.getNode("/tmp/test/en").getNodes();
StringBuilder names = new StringBuilder();
while (iter.hasNext()) {
names.append(iter.nextNode().getName()).append(",");
}
assertEquals("child order", "jcr:content,toolbar,products,services,company,events,support,community,blog,", names.toString());
}
    /**
     * Installs a package and checks that a snapshot package is created.
     */
    @Test
    public void testSnapshotExists() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp.zip"), false);
        assertNotNull(pack);
        pack.install(getDefaultOptions());
        assertPackageNodeExists(TMP_SNAPSHOT_PACKAGE_ID);
        assertNodeExists("/tmp/foo/bar/tobi");
    }
    /**
     * Installs and uninstalls a package and checks that the content is reverted.
     */
    @Test
    public void testUninstall() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp.zip"), false);
        assertNotNull(pack);
        pack.install(getDefaultOptions());
        assertNodeExists("/tmp/foo/bar/tobi");
        pack.uninstall(getDefaultOptions());
        assertNodeMissing("/tmp/foo/bar/tobi");
    }
    /**
     * Uninstalls a package that has no snapshot (JCRVLT-89). In non-strict
     * mode this is tolerated and the content remains untouched.
     */
    @Test
    public void testUninstallNoSnapshot() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp.zip"), false);
        assertNotNull(pack);
        // extract should not generate snapshots
        ImportOptions opts = getDefaultOptions();
        opts.setStrict(false);
        pack.extract(opts);
        assertNodeExists("/tmp/foo/bar/tobi");
        assertPackageNodeMissing(TMP_SNAPSHOT_PACKAGE_ID);
        pack.uninstall(opts);
        // without a snapshot there is nothing to revert to
        assertNodeExists("/tmp/foo/bar/tobi");
    }
    /**
     * Checks if uninstalling a package in strict mode with no snapshot fails (JCRVLT-89).
     */
    @Test
    public void testUninstallNoSnapshotStrict() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp.zip"), false);
        assertNotNull(pack);
        // extract should not generate snapshots
        pack.extract(getDefaultOptions());
        assertNodeExists("/tmp/foo/bar/tobi");
        assertPackageNodeMissing(TMP_SNAPSHOT_PACKAGE_ID);
        ImportOptions opts = getDefaultOptions();
        opts.setStrict(true);
        try {
            pack.uninstall(opts);
            fail("uninstalling a package with no snapshot should fail in strict mode.");
        } catch (PackageException e) {
            // expected
        }
    }
/**
* Installs a binary properties.
*/
@Test
public void testBinaryProperties() throws RepositoryException, IOException, PackageException {
JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp_binary.zip"), false);
assertNotNull(pack);
pack.install(getDefaultOptions());
Property p = admin.getProperty("/tmp/binary/test/jcr:data");
assertEquals(PropertyType.BINARY, p.getType());
StringBuilder buffer = new StringBuilder(8192);
while (buffer.length() < 8192) {
buffer.append("0123456789abcdef");
}
String result = IOUtils.toString(p.getBinary().getStream());
assertEquals(buffer.toString(), result);
}
    /**
     * Installs a binary property twice to check that the second install does
     * not report an update.
     * TODO: this is not implemented yet. see JCRVLT-110
     */
    @Test
    @Ignore
    public void testBinaryPropertyTwice() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp_binary.zip"), false);
        assertNotNull(pack);
        pack.install(getDefaultOptions());
        Property p = admin.getProperty("/tmp/binary/test/jcr:data");
        assertEquals(PropertyType.BINARY, p.getType());
        // expected payload: "0123456789abcdef" repeated to 8192 characters
        StringBuilder buffer = new StringBuilder(8192);
        while (buffer.length() < 8192) {
            buffer.append("0123456789abcdef");
        }
        String result = IOUtils.toString(p.getBinary().getStream());
        assertEquals(buffer.toString(), result);
        // install again to check if binary data is not updated
        ImportOptions opts = getDefaultOptions();
        TrackingListener listener = new TrackingListener(opts.getListener());
        opts.setListener(listener);
        pack.install(opts);
        //TODO: assertEquals("-", listener.getActions().get("/tmp/binary/test"));
        assertEquals("U", listener.getActions().get("/tmp/binary/test"));
    }
    /**
     * Tests that binaries outside the filter are not imported (JCRVLT-126).
     */
    @Test
    public void testBinaryPropertiesOutsideFilter() throws RepositoryException, IOException, PackageException {
        // first install the package once to create the intermediate nodes
        JcrPackage pack = packMgr.upload(getStream("/test-packages/test_filter_binary.zip"), false);
        assertNotNull(pack);
        pack.install(getDefaultOptions());
        assertProperty("/tmp/test", "123");
        // delete the binary properties
        if (admin.itemExists("/root-binary-property")) {
            admin.removeItem("/root-binary-property");
        }
        admin.removeItem("/tmp/tmp-binary-property");
        admin.removeItem("/tmp/test");
        admin.removeItem("/tmp/test-project");
        admin.save();
        assertPropertyMissing("/root-binary-property");
        assertPropertyMissing("/tmp/tmp-binary-property");
        // now install again and check if the properties are still missing
        pack.install(getDefaultOptions());
        assertPropertyMissing("/tmp/test");
        assertPropertyMissing("/root-binary-property");
        assertPropertyMissing("/tmp/tmp-binary-property");
    }
/**
* Installs a package with a different node type
*/
@Test
public void testNodeTypeChange() throws RepositoryException, IOException, PackageException {
JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp.zip"), false);
assertNotNull(pack);
assertPackageNodeExists(TMP_PACKAGE_ID);
ImportOptions opts = getDefaultOptions();
pack.install(opts);
assertNodeExists("/tmp/foo");
assertEquals(admin.getNode("/tmp").getPrimaryNodeType().getName(), "sling:OrderedFolder");
pack = packMgr.upload(getStream("/test-packages/tmp_nt_folder.zip"), false);
assertNotNull(pack);
assertPackageNodeExists(TMP_PACKAGE_ID);
pack.install(opts);
assertNodeExists("/tmp/foo");
assertEquals(admin.getNode("/tmp").getPrimaryNodeType().getName(), "nt:folder");
}
    /**
     * Installs a package with versioned nodes, modifies the content, and
     * verifies that a re-install restores the packaged (checked-in) state.
     */
    @Test
    public void testVersionInstall() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/test_version.zip"), false);
        assertNotNull(pack);
        ImportOptions opts = getDefaultOptions();
        pack.install(opts);
        assertProperty("/testroot/a/test", "123");
        assertProperty("/testroot/a/jcr:isCheckedOut", "false");
        // modify
        admin.getWorkspace().getVersionManager().checkout("/testroot/a");
        admin.getProperty("/testroot/a/test").setValue("test");
        admin.save();
        admin.getWorkspace().getVersionManager().checkin("/testroot/a");
        // install a 2nd time
        opts = getDefaultOptions();
        pack.install(opts);
        assertProperty("/testroot/a/test", "123");
        assertProperty("/testroot/a/jcr:isCheckedOut", "false");
    }
    /**
     * Installs a package with versions retains checked out state.
     * NOTE(review): despite the name, the final assertions expect
     * jcr:isCheckedOut=false after re-install (the node is left checked out
     * before the 2nd install, but never checked in) — confirm intended semantics.
     */
    @Test
    public void testVersionInstallCheckedOut() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/test_version.zip"), false);
        assertNotNull(pack);
        ImportOptions opts = getDefaultOptions();
        pack.install(opts);
        assertProperty("/testroot/a/test", "123");
        assertProperty("/testroot/a/jcr:isCheckedOut", "false");
        // modify (node stays checked out; no checkin before the 2nd install)
        admin.getWorkspace().getVersionManager().checkout("/testroot/a");
        admin.getProperty("/testroot/a/test").setValue("test");
        admin.save();
        // install a 2nd time
        opts = getDefaultOptions();
        pack.install(opts);
        assertProperty("/testroot/a/test", "123");
        assertProperty("/testroot/a/jcr:isCheckedOut", "false");
    }
/**
* Installs a package with invalid dependency strings. see JCRVLT-265
*/
@Test
public void testInvalidDependenciesInProperties() throws RepositoryException, IOException, PackageException {
JcrPackage pack = packMgr.upload(getStream("/test-packages/null-dependency-test.zip"), false);
assertNotNull(pack);
for (Dependency dep: pack.getDefinition().getDependencies()) {
assertNotNull("dependency element", dep);
}
}
    /**
     * Creates a package definition with invalid dependencies (null entries and
     * empty strings) and verifies they are filtered out. see JCRVLT-265
     */
    @Test
    public void testInvalidDependenciesInDefinition() throws RepositoryException, IOException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/null-dependency-test.zip"), false);
        assertNotNull(pack);
        // a null entry in the array must not surface via getDependencies()
        Dependency[] deps = {new Dependency(TMP_PACKAGE_ID), null};
        pack.getDefinition().setDependencies(deps, true);
        for (Dependency dep: pack.getDefinition().getDependencies()) {
            assertNotNull("dependency element", dep);
        }
        // an empty-string dependency value must not surface either
        Value[] values = {admin.getValueFactory().createValue("")};
        pack.getDefNode().setProperty(PN_DEPENDENCIES, values);
        admin.save();
        pack = packMgr.open(pack.getDefinition().getId());
        for (Dependency dep: pack.getDefinition().getDependencies()) {
            assertNotNull("dependency element", dep);
        }
    }
    /**
     * Tests if package installation works w/o RW access to / and /tmp.
     * this currently fails, due to the creation of the snapshot.
     * also see {@link NoRootAccessExportIT#exportNoRootAccess()}
     */
    @Test
    @Ignore("JCRVLT-100")
    public void testInstallWithoutRootAndTmpAccess() throws IOException, RepositoryException, ConfigurationException, PackageException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp_foo.zip"), true, true);
        assertNotNull(pack);
        assertTrue(pack.isValid());
        PackageId id = pack.getPackage().getId();
        pack.close();
        // Create test user
        UserManager userManager = ((JackrabbitSession)admin).getUserManager();
        String userId = "user1";
        String userPwd = "pwd1";
        User user1 = userManager.createUser(userId, userPwd);
        Principal principal1 = user1.getPrincipal();
        // Create /tmp folder
        admin.getRootNode().addNode("tmp").addNode("foo");
        admin.save();
        // Setup test user ACLs such that the root node is not accessible,
        // but the package root and the install target are
        AccessControlUtils.addAccessControlEntry(admin, null, principal1, new String[]{"jcr:namespaceManagement","jcr:nodeTypeDefinitionManagement"}, true);
        AccessControlUtils.addAccessControlEntry(admin, "/", principal1, new String[]{"jcr:all"}, false);
        AccessControlUtils.addAccessControlEntry(admin, ((JcrPackageRegistry)packMgr.getRegistry()).getPackRootPaths()[0], principal1, new String[]{"jcr:all"}, true);
        AccessControlUtils.addAccessControlEntry(admin, "/tmp/foo", principal1, new String[]{"jcr:all"}, true);
        admin.save();
        // install with the restricted session
        Session session = repository.login(new SimpleCredentials(userId, userPwd.toCharArray()));
        JcrPackageManagerImpl userPackMgr = new JcrPackageManagerImpl(session, new String[0]);
        pack = userPackMgr.open(id);
        ImportOptions opts = getDefaultOptions();
        pack.install(opts);
        pack.close();
        session.logout();
        assertNodeExists("/tmp/foo/bar/tobi");
    }
    /**
     * Test if package extraction works w/o RW access to / and /tmp.
     */
    @Test
    public void testExtractWithoutRootAndTmpAccess() throws IOException, RepositoryException, ConfigurationException, PackageException {
        Assume.assumeTrue(!isOak());
        JcrPackage pack = packMgr.upload(getStream("/test-packages/tmp_foo.zip"), true, true);
        assertNotNull(pack);
        assertTrue(pack.isValid());
        PackageId id = pack.getPackage().getId();
        pack.close();
        // Create test user
        UserManager userManager = ((JackrabbitSession)admin).getUserManager();
        String userId = "user1";
        String userPwd = "pwd1";
        User user1 = userManager.createUser(userId, userPwd);
        Principal principal1 = user1.getPrincipal();
        // Create /tmp folder
        admin.getRootNode().addNode("tmp").addNode("foo");
        admin.save();
        // Setup test user ACLs such that the root node is not accessible,
        // but the package root and the install target are
        AccessControlUtils.addAccessControlEntry(admin, null, principal1, new String[]{"jcr:namespaceManagement","jcr:nodeTypeDefinitionManagement"}, true);
        AccessControlUtils.addAccessControlEntry(admin, "/", principal1, new String[]{"jcr:all"}, false);
        AccessControlUtils.addAccessControlEntry(admin, ((JcrPackageRegistry)packMgr.getRegistry()).getPackRootPaths()[0], principal1, new String[]{"jcr:all"}, true);
        AccessControlUtils.addAccessControlEntry(admin, "/tmp/foo", principal1, new String[]{"jcr:all"}, true);
        admin.save();
        // extract (no snapshot) with the restricted session
        Session session = repository.login(new SimpleCredentials(userId, userPwd.toCharArray()));
        JcrPackageManagerImpl userPackMgr = new JcrPackageManagerImpl(session, new String[0]);
        pack = userPackMgr.open(id);
        ImportOptions opts = getDefaultOptions();
        opts.setStrict(false);
        pack.extract(opts);
        pack.close();
        session.logout();
        assertNodeExists("/tmp/foo/bar/tobi");
    }
    /**
     * Tests if installing a package with a 0-mtime entry works with java9.
     * see http://bugs.java.com/view_bug.do?bug_id=JDK-8184940
     */
    @Test
    public void testPackageInstallWith0MtimeZipEntry() throws IOException, RepositoryException, NoSuchFieldException, IllegalAccessException {
        JcrPackage pack = packMgr.upload(getStream("/test-packages/properties-with-0mtime.zip"), true, true);
        assertEquals("packageid", TMP_PACKAGE_ID, pack.getDefinition().getId());
    }
// todo: upload with version
// todo: rename
} |
oracle/graalpython | 36,113 | graalpython/com.oracle.graal.python/src/com/oracle/graal/python/builtins/modules/ast/Sst2ObjVisitor.java | /*
* Copyright (c) 2019, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// Checkstyle: stop
// JaCoCo Exclude
//@formatter:off
// Generated from Python.asdl by main_asdl_gen.py
package com.oracle.graal.python.builtins.modules.ast;
import static com.oracle.truffle.api.CompilerDirectives.shouldNotReachHere;
import com.oracle.graal.python.builtins.objects.object.PythonObject;
import com.oracle.graal.python.pegparser.sst.ModTy;
import com.oracle.graal.python.pegparser.sst.StmtTy;
import com.oracle.graal.python.pegparser.sst.ExprTy;
import com.oracle.graal.python.pegparser.sst.ExprContextTy;
import com.oracle.graal.python.pegparser.sst.BoolOpTy;
import com.oracle.graal.python.pegparser.sst.OperatorTy;
import com.oracle.graal.python.pegparser.sst.UnaryOpTy;
import com.oracle.graal.python.pegparser.sst.CmpOpTy;
import com.oracle.graal.python.pegparser.sst.ComprehensionTy;
import com.oracle.graal.python.pegparser.sst.ExceptHandlerTy;
import com.oracle.graal.python.pegparser.sst.ArgumentsTy;
import com.oracle.graal.python.pegparser.sst.ArgTy;
import com.oracle.graal.python.pegparser.sst.KeywordTy;
import com.oracle.graal.python.pegparser.sst.AliasTy;
import com.oracle.graal.python.pegparser.sst.WithItemTy;
import com.oracle.graal.python.pegparser.sst.MatchCaseTy;
import com.oracle.graal.python.pegparser.sst.PatternTy;
import com.oracle.graal.python.pegparser.sst.TypeIgnoreTy;
import com.oracle.graal.python.pegparser.sst.TypeParamTy;
final class Sst2ObjVisitor extends Sst2ObjVisitorBase {
    // AST class objects and attribute-name constants shared by all visit methods.
    private final AstState state;
    Sst2ObjVisitor(AstState state) {
        this.state = state;
    }
    // mod_ty nodes (module roots): copy fields only; no source-range attributes are set.
    @Override
    public Object visit(ModTy.Module node) {
        PythonObject o = createPythonObject(state.clsModule);
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_TYPE_IGNORES, seq2List(node.typeIgnores));
        return o;
    }
    @Override
    public Object visit(ModTy.Interactive node) {
        PythonObject o = createPythonObject(state.clsInteractive);
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        return o;
    }
    @Override
    public Object visit(ModTy.Expression node) {
        PythonObject o = createPythonObject(state.clsExpression);
        o.setAttribute(AstState.T_F_BODY, visitNonNull(node.body));
        return o;
    }
    @Override
    public Object visit(ModTy.FunctionType node) {
        PythonObject o = createPythonObject(state.clsFunctionType);
        o.setAttribute(AstState.T_F_ARGTYPES, seq2List(node.argTypes));
        o.setAttribute(AstState.T_F_RETURNS, visitNonNull(node.returns));
        return o;
    }
    // stmt_ty nodes: copy fields, then set the node's source-range attributes.
    @Override
    public Object visit(StmtTy.FunctionDef node) {
        PythonObject o = createPythonObject(state.clsFunctionDef);
        o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
        o.setAttribute(AstState.T_F_ARGS, visitNonNull(node.args));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_DECORATOR_LIST, seq2List(node.decoratorList));
        o.setAttribute(AstState.T_F_RETURNS, visitNullable(node.returns));
        o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
        o.setAttribute(AstState.T_F_TYPE_PARAMS, seq2List(node.typeParams));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.AsyncFunctionDef node) {
        PythonObject o = createPythonObject(state.clsAsyncFunctionDef);
        o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
        o.setAttribute(AstState.T_F_ARGS, visitNonNull(node.args));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_DECORATOR_LIST, seq2List(node.decoratorList));
        o.setAttribute(AstState.T_F_RETURNS, visitNullable(node.returns));
        o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
        o.setAttribute(AstState.T_F_TYPE_PARAMS, seq2List(node.typeParams));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.ClassDef node) {
        PythonObject o = createPythonObject(state.clsClassDef);
        o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
        o.setAttribute(AstState.T_F_BASES, seq2List(node.bases));
        o.setAttribute(AstState.T_F_KEYWORDS, seq2List(node.keywords));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_DECORATOR_LIST, seq2List(node.decoratorList));
        o.setAttribute(AstState.T_F_TYPE_PARAMS, seq2List(node.typeParams));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    // stmt_ty nodes: assignment and loop statements.
    @Override
    public Object visit(StmtTy.Return node) {
        PythonObject o = createPythonObject(state.clsReturn);
        o.setAttribute(AstState.T_F_VALUE, visitNullable(node.value));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Delete node) {
        PythonObject o = createPythonObject(state.clsDelete);
        o.setAttribute(AstState.T_F_TARGETS, seq2List(node.targets));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Assign node) {
        PythonObject o = createPythonObject(state.clsAssign);
        o.setAttribute(AstState.T_F_TARGETS, seq2List(node.targets));
        o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
        o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.TypeAlias node) {
        PythonObject o = createPythonObject(state.clsTypeAlias);
        o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
        o.setAttribute(AstState.T_F_TYPE_PARAMS, seq2List(node.typeParams));
        o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.AugAssign node) {
        PythonObject o = createPythonObject(state.clsAugAssign);
        o.setAttribute(AstState.T_F_TARGET, visitNonNull(node.target));
        o.setAttribute(AstState.T_F_OP, visitNonNull(node.op));
        o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.AnnAssign node) {
        PythonObject o = createPythonObject(state.clsAnnAssign);
        o.setAttribute(AstState.T_F_TARGET, visitNonNull(node.target));
        o.setAttribute(AstState.T_F_ANNOTATION, visitNonNull(node.annotation));
        o.setAttribute(AstState.T_F_VALUE, visitNullable(node.value));
        o.setAttribute(AstState.T_F_SIMPLE, visitNonNull(node.isSimple));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.For node) {
        PythonObject o = createPythonObject(state.clsFor);
        o.setAttribute(AstState.T_F_TARGET, visitNonNull(node.target));
        o.setAttribute(AstState.T_F_ITER, visitNonNull(node.iter));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_ORELSE, seq2List(node.orElse));
        o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.AsyncFor node) {
        PythonObject o = createPythonObject(state.clsAsyncFor);
        o.setAttribute(AstState.T_F_TARGET, visitNonNull(node.target));
        o.setAttribute(AstState.T_F_ITER, visitNonNull(node.iter));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_ORELSE, seq2List(node.orElse));
        o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.While node) {
        PythonObject o = createPythonObject(state.clsWhile);
        o.setAttribute(AstState.T_F_TEST, visitNonNull(node.test));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_ORELSE, seq2List(node.orElse));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    // stmt_ty nodes: compound statements (conditionals, with, match, try, raise, assert).
    @Override
    public Object visit(StmtTy.If node) {
        PythonObject o = createPythonObject(state.clsIf);
        o.setAttribute(AstState.T_F_TEST, visitNonNull(node.test));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_ORELSE, seq2List(node.orElse));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.With node) {
        PythonObject o = createPythonObject(state.clsWith);
        o.setAttribute(AstState.T_F_ITEMS, seq2List(node.items));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.AsyncWith node) {
        PythonObject o = createPythonObject(state.clsAsyncWith);
        o.setAttribute(AstState.T_F_ITEMS, seq2List(node.items));
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Match node) {
        PythonObject o = createPythonObject(state.clsMatch);
        o.setAttribute(AstState.T_F_SUBJECT, visitNonNull(node.subject));
        o.setAttribute(AstState.T_F_CASES, seq2List(node.cases));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Raise node) {
        PythonObject o = createPythonObject(state.clsRaise);
        o.setAttribute(AstState.T_F_EXC, visitNullable(node.exc));
        o.setAttribute(AstState.T_F_CAUSE, visitNullable(node.cause));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Try node) {
        PythonObject o = createPythonObject(state.clsTry);
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_HANDLERS, seq2List(node.handlers));
        o.setAttribute(AstState.T_F_ORELSE, seq2List(node.orElse));
        o.setAttribute(AstState.T_F_FINALBODY, seq2List(node.finalBody));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.TryStar node) {
        PythonObject o = createPythonObject(state.clsTryStar);
        o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
        o.setAttribute(AstState.T_F_HANDLERS, seq2List(node.handlers));
        o.setAttribute(AstState.T_F_ORELSE, seq2List(node.orElse));
        o.setAttribute(AstState.T_F_FINALBODY, seq2List(node.finalBody));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Assert node) {
        PythonObject o = createPythonObject(state.clsAssert);
        o.setAttribute(AstState.T_F_TEST, visitNonNull(node.test));
        o.setAttribute(AstState.T_F_MSG, visitNullable(node.msg));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    // stmt_ty nodes: imports, scope declarations and simple statements.
    @Override
    public Object visit(StmtTy.Import node) {
        PythonObject o = createPythonObject(state.clsImport);
        o.setAttribute(AstState.T_F_NAMES, seq2List(node.names));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.ImportFrom node) {
        PythonObject o = createPythonObject(state.clsImportFrom);
        o.setAttribute(AstState.T_F_MODULE, visitNullable(node.module));
        o.setAttribute(AstState.T_F_NAMES, seq2List(node.names));
        o.setAttribute(AstState.T_F_LEVEL, visitNullable(node.level));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Global node) {
        PythonObject o = createPythonObject(state.clsGlobal);
        o.setAttribute(AstState.T_F_NAMES, seq2List(node.names));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Nonlocal node) {
        PythonObject o = createPythonObject(state.clsNonlocal);
        o.setAttribute(AstState.T_F_NAMES, seq2List(node.names));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Expr node) {
        PythonObject o = createPythonObject(state.clsExpr);
        o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Pass node) {
        PythonObject o = createPythonObject(state.clsPass);
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Break node) {
        PythonObject o = createPythonObject(state.clsBreak);
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(StmtTy.Continue node) {
        PythonObject o = createPythonObject(state.clsContinue);
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    // expr_ty nodes start here.
    @Override
    public Object visit(ExprTy.BoolOp node) {
        PythonObject o = createPythonObject(state.clsBoolOp);
        o.setAttribute(AstState.T_F_OP, visitNonNull(node.op));
        o.setAttribute(AstState.T_F_VALUES, seq2List(node.values));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    // expr_ty nodes: operators, lambdas, containers and comprehensions.
    @Override
    public Object visit(ExprTy.NamedExpr node) {
        PythonObject o = createPythonObject(state.clsNamedExpr);
        o.setAttribute(AstState.T_F_TARGET, visitNonNull(node.target));
        o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.BinOp node) {
        PythonObject o = createPythonObject(state.clsBinOp);
        o.setAttribute(AstState.T_F_LEFT, visitNonNull(node.left));
        o.setAttribute(AstState.T_F_OP, visitNonNull(node.op));
        o.setAttribute(AstState.T_F_RIGHT, visitNonNull(node.right));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.UnaryOp node) {
        PythonObject o = createPythonObject(state.clsUnaryOp);
        o.setAttribute(AstState.T_F_OP, visitNonNull(node.op));
        o.setAttribute(AstState.T_F_OPERAND, visitNonNull(node.operand));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.Lambda node) {
        PythonObject o = createPythonObject(state.clsLambda);
        o.setAttribute(AstState.T_F_ARGS, visitNonNull(node.args));
        o.setAttribute(AstState.T_F_BODY, visitNonNull(node.body));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.IfExp node) {
        PythonObject o = createPythonObject(state.clsIfExp);
        o.setAttribute(AstState.T_F_TEST, visitNonNull(node.test));
        o.setAttribute(AstState.T_F_BODY, visitNonNull(node.body));
        o.setAttribute(AstState.T_F_ORELSE, visitNonNull(node.orElse));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.Dict node) {
        PythonObject o = createPythonObject(state.clsDict);
        o.setAttribute(AstState.T_F_KEYS, seq2List(node.keys));
        o.setAttribute(AstState.T_F_VALUES, seq2List(node.values));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.Set node) {
        PythonObject o = createPythonObject(state.clsSet);
        o.setAttribute(AstState.T_F_ELTS, seq2List(node.elements));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.ListComp node) {
        PythonObject o = createPythonObject(state.clsListComp);
        o.setAttribute(AstState.T_F_ELT, visitNonNull(node.element));
        o.setAttribute(AstState.T_F_GENERATORS, seq2List(node.generators));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.SetComp node) {
        PythonObject o = createPythonObject(state.clsSetComp);
        o.setAttribute(AstState.T_F_ELT, visitNonNull(node.element));
        o.setAttribute(AstState.T_F_GENERATORS, seq2List(node.generators));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
    @Override
    public Object visit(ExprTy.DictComp node) {
        PythonObject o = createPythonObject(state.clsDictComp);
        o.setAttribute(AstState.T_F_KEY, visitNonNull(node.key));
        o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
        o.setAttribute(AstState.T_F_GENERATORS, seq2List(node.generators));
        fillSourceRangeAttributes(o, node.getSourceRange());
        return o;
    }
@Override
public Object visit(ExprTy.GeneratorExp node) {
PythonObject o = createPythonObject(state.clsGeneratorExp);
o.setAttribute(AstState.T_F_ELT, visitNonNull(node.element));
o.setAttribute(AstState.T_F_GENERATORS, seq2List(node.generators));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Await node) {
PythonObject o = createPythonObject(state.clsAwait);
o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Yield node) {
PythonObject o = createPythonObject(state.clsYield);
o.setAttribute(AstState.T_F_VALUE, visitNullable(node.value));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.YieldFrom node) {
PythonObject o = createPythonObject(state.clsYieldFrom);
o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Compare node) {
PythonObject o = createPythonObject(state.clsCompare);
o.setAttribute(AstState.T_F_LEFT, visitNonNull(node.left));
o.setAttribute(AstState.T_F_OPS, seq2List(node.ops));
o.setAttribute(AstState.T_F_COMPARATORS, seq2List(node.comparators));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Call node) {
PythonObject o = createPythonObject(state.clsCall);
o.setAttribute(AstState.T_F_FUNC, visitNonNull(node.func));
o.setAttribute(AstState.T_F_ARGS, seq2List(node.args));
o.setAttribute(AstState.T_F_KEYWORDS, seq2List(node.keywords));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.FormattedValue node) {
PythonObject o = createPythonObject(state.clsFormattedValue);
o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
o.setAttribute(AstState.T_F_CONVERSION, visitNonNull(node.conversion));
o.setAttribute(AstState.T_F_FORMAT_SPEC, visitNullable(node.formatSpec));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.JoinedStr node) {
PythonObject o = createPythonObject(state.clsJoinedStr);
o.setAttribute(AstState.T_F_VALUES, seq2List(node.values));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Constant node) {
PythonObject o = createPythonObject(state.clsConstant);
o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
o.setAttribute(AstState.T_F_KIND, visitNullableStringOrByteArray(node.kind));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Attribute node) {
PythonObject o = createPythonObject(state.clsAttribute);
o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
o.setAttribute(AstState.T_F_ATTR, visitNonNull(node.attr));
o.setAttribute(AstState.T_F_CTX, visitNonNull(node.context));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Subscript node) {
PythonObject o = createPythonObject(state.clsSubscript);
o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
o.setAttribute(AstState.T_F_SLICE, visitNonNull(node.slice));
o.setAttribute(AstState.T_F_CTX, visitNonNull(node.context));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Starred node) {
PythonObject o = createPythonObject(state.clsStarred);
o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
o.setAttribute(AstState.T_F_CTX, visitNonNull(node.context));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Name node) {
PythonObject o = createPythonObject(state.clsName);
o.setAttribute(AstState.T_F_ID, visitNonNull(node.id));
o.setAttribute(AstState.T_F_CTX, visitNonNull(node.context));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.List node) {
PythonObject o = createPythonObject(state.clsList);
o.setAttribute(AstState.T_F_ELTS, seq2List(node.elements));
o.setAttribute(AstState.T_F_CTX, visitNonNull(node.context));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Tuple node) {
PythonObject o = createPythonObject(state.clsTuple);
o.setAttribute(AstState.T_F_ELTS, seq2List(node.elements));
o.setAttribute(AstState.T_F_CTX, visitNonNull(node.context));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
@Override
public Object visit(ExprTy.Slice node) {
PythonObject o = createPythonObject(state.clsSlice);
o.setAttribute(AstState.T_F_LOWER, visitNullable(node.lower));
o.setAttribute(AstState.T_F_UPPER, visitNullable(node.upper));
o.setAttribute(AstState.T_F_STEP, visitNullable(node.step));
fillSourceRangeAttributes(o, node.getSourceRange());
return o;
}
        // --- Enum-to-singleton converters ------------------------------------
        // Expression contexts and operators are stateless in the Python ast
        // module, so each enum constant maps to one pre-allocated singleton
        // held in `state`. The `default` branches are unreachable unless a new
        // enum constant is added without updating the corresponding switch.
        public Object visitNonNull(ExprContextTy v) {
            switch (v) {
                case Load:
                    return state.singletonLoad;
                case Store:
                    return state.singletonStore;
                case Del:
                    return state.singletonDel;
                default:
                    throw shouldNotReachHere();
            }
        }
        public Object visitNonNull(BoolOpTy v) {
            switch (v) {
                case And:
                    return state.singletonAnd;
                case Or:
                    return state.singletonOr;
                default:
                    throw shouldNotReachHere();
            }
        }
        public Object visitNonNull(OperatorTy v) {
            switch (v) {
                case Add:
                    return state.singletonAdd;
                case Sub:
                    return state.singletonSub;
                case Mult:
                    return state.singletonMult;
                case MatMult:
                    return state.singletonMatMult;
                case Div:
                    return state.singletonDiv;
                case Mod:
                    return state.singletonMod;
                case Pow:
                    return state.singletonPow;
                case LShift:
                    return state.singletonLShift;
                case RShift:
                    return state.singletonRShift;
                case BitOr:
                    return state.singletonBitOr;
                case BitXor:
                    return state.singletonBitXor;
                case BitAnd:
                    return state.singletonBitAnd;
                case FloorDiv:
                    return state.singletonFloorDiv;
                default:
                    throw shouldNotReachHere();
            }
        }
        public Object visitNonNull(UnaryOpTy v) {
            switch (v) {
                case Invert:
                    return state.singletonInvert;
                case Not:
                    return state.singletonNot;
                case UAdd:
                    return state.singletonUAdd;
                case USub:
                    return state.singletonUSub;
                default:
                    throw shouldNotReachHere();
            }
        }
        @Override
        public Object visitNonNull(CmpOpTy v) {
            switch (v) {
                case Eq:
                    return state.singletonEq;
                case NotEq:
                    return state.singletonNotEq;
                case Lt:
                    return state.singletonLt;
                case LtE:
                    return state.singletonLtE;
                case Gt:
                    return state.singletonGt;
                case GtE:
                    return state.singletonGtE;
                case Is:
                    return state.singletonIs;
                case IsNot:
                    return state.singletonIsNot;
                case In:
                    return state.singletonIn;
                case NotIn:
                    return state.singletonNotIn;
                default:
                    throw shouldNotReachHere();
            }
        }
        // --- Non-expression helper nodes -------------------------------------
        // Converters for comprehension clauses, function signatures, match
        // patterns and type parameters. Note that ComprehensionTy, ArgumentsTy,
        // WithItemTy, MatchCaseTy and TypeIgnore do not call
        // fillSourceRangeAttributes — mirroring CPython's AST, where the
        // corresponding node types carry no line/column attributes.
        @Override
        public Object visit(ComprehensionTy node) {
            PythonObject o = createPythonObject(state.clsComprehensionTy);
            o.setAttribute(AstState.T_F_TARGET, visitNonNull(node.target));
            o.setAttribute(AstState.T_F_ITER, visitNonNull(node.iter));
            o.setAttribute(AstState.T_F_IFS, seq2List(node.ifs));
            o.setAttribute(AstState.T_F_IS_ASYNC, visitNonNull(node.isAsync));
            return o;
        }
        // type and name are optional: a bare `except:` clause has neither.
        @Override
        public Object visit(ExceptHandlerTy.ExceptHandler node) {
            PythonObject o = createPythonObject(state.clsExceptHandler);
            o.setAttribute(AstState.T_F_TYPE, visitNullable(node.type));
            o.setAttribute(AstState.T_F_NAME, visitNullable(node.name));
            o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        // Full argument list of a function/lambda; *args and **kwargs are
        // optional, the remaining groups are (possibly empty) lists.
        @Override
        public Object visit(ArgumentsTy node) {
            PythonObject o = createPythonObject(state.clsArgumentsTy);
            o.setAttribute(AstState.T_F_POSONLYARGS, seq2List(node.posOnlyArgs));
            o.setAttribute(AstState.T_F_ARGS, seq2List(node.args));
            o.setAttribute(AstState.T_F_VARARG, visitNullable(node.varArg));
            o.setAttribute(AstState.T_F_KWONLYARGS, seq2List(node.kwOnlyArgs));
            o.setAttribute(AstState.T_F_KW_DEFAULTS, seq2List(node.kwDefaults));
            o.setAttribute(AstState.T_F_KWARG, visitNullable(node.kwArg));
            o.setAttribute(AstState.T_F_DEFAULTS, seq2List(node.defaults));
            return o;
        }
        @Override
        public Object visit(ArgTy node) {
            PythonObject o = createPythonObject(state.clsArgTy);
            o.setAttribute(AstState.T_F_ARG, visitNonNull(node.arg));
            o.setAttribute(AstState.T_F_ANNOTATION, visitNullable(node.annotation));
            o.setAttribute(AstState.T_F_TYPE_COMMENT, visitNullableStringOrByteArray(node.typeComment));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        // arg is nullable: `f(**kwargs)` produces a keyword with no name.
        @Override
        public Object visit(KeywordTy node) {
            PythonObject o = createPythonObject(state.clsKeywordTy);
            o.setAttribute(AstState.T_F_ARG, visitNullable(node.arg));
            o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        // `import x as y` / `from m import x as y`; asName absent without `as`.
        @Override
        public Object visit(AliasTy node) {
            PythonObject o = createPythonObject(state.clsAliasTy);
            o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
            o.setAttribute(AstState.T_F_ASNAME, visitNullable(node.asName));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(WithItemTy node) {
            PythonObject o = createPythonObject(state.clsWithItemTy);
            o.setAttribute(AstState.T_F_CONTEXT_EXPR, visitNonNull(node.contextExpr));
            o.setAttribute(AstState.T_F_OPTIONAL_VARS, visitNullable(node.optionalVars));
            return o;
        }
        @Override
        public Object visit(MatchCaseTy node) {
            PythonObject o = createPythonObject(state.clsMatchCaseTy);
            o.setAttribute(AstState.T_F_PATTERN, visitNonNull(node.pattern));
            o.setAttribute(AstState.T_F_GUARD, visitNullable(node.guard));
            o.setAttribute(AstState.T_F_BODY, seq2List(node.body));
            return o;
        }
        @Override
        public Object visit(PatternTy.MatchValue node) {
            PythonObject o = createPythonObject(state.clsMatchValue);
            o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(PatternTy.MatchSingleton node) {
            PythonObject o = createPythonObject(state.clsMatchSingleton);
            o.setAttribute(AstState.T_F_VALUE, visitNonNull(node.value));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(PatternTy.MatchSequence node) {
            PythonObject o = createPythonObject(state.clsMatchSequence);
            o.setAttribute(AstState.T_F_PATTERNS, seq2List(node.patterns));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        // rest is nullable: names the `**rest` capture when present.
        @Override
        public Object visit(PatternTy.MatchMapping node) {
            PythonObject o = createPythonObject(state.clsMatchMapping);
            o.setAttribute(AstState.T_F_KEYS, seq2List(node.keys));
            o.setAttribute(AstState.T_F_PATTERNS, seq2List(node.patterns));
            o.setAttribute(AstState.T_F_REST, visitNullable(node.rest));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(PatternTy.MatchClass node) {
            PythonObject o = createPythonObject(state.clsMatchClass);
            o.setAttribute(AstState.T_F_CLS, visitNonNull(node.cls));
            o.setAttribute(AstState.T_F_PATTERNS, seq2List(node.patterns));
            o.setAttribute(AstState.T_F_KWD_ATTRS, seq2List(node.kwdAttrs));
            o.setAttribute(AstState.T_F_KWD_PATTERNS, seq2List(node.kwdPatterns));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        // name is nullable: `*_` produces an anonymous star pattern.
        @Override
        public Object visit(PatternTy.MatchStar node) {
            PythonObject o = createPythonObject(state.clsMatchStar);
            o.setAttribute(AstState.T_F_NAME, visitNullable(node.name));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(PatternTy.MatchAs node) {
            PythonObject o = createPythonObject(state.clsMatchAs);
            o.setAttribute(AstState.T_F_PATTERN, visitNullable(node.pattern));
            o.setAttribute(AstState.T_F_NAME, visitNullable(node.name));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(PatternTy.MatchOr node) {
            PythonObject o = createPythonObject(state.clsMatchOr);
            o.setAttribute(AstState.T_F_PATTERNS, seq2List(node.patterns));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        // `# type: ignore` comment marker: line number plus the raw tag text.
        @Override
        public Object visit(TypeIgnoreTy.TypeIgnore node) {
            PythonObject o = createPythonObject(state.clsTypeIgnore);
            o.setAttribute(AstState.T_F_LINENO, visitNonNull(node.lineNo));
            o.setAttribute(AstState.T_F_TAG, visitNonNullStringOrByteArray(node.tag));
            return o;
        }
        // PEP 695 type parameters; a TypeVar's bound is optional.
        @Override
        public Object visit(TypeParamTy.TypeVar node) {
            PythonObject o = createPythonObject(state.clsTypeVar);
            o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
            o.setAttribute(AstState.T_F_BOUND, visitNullable(node.bound));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(TypeParamTy.ParamSpec node) {
            PythonObject o = createPythonObject(state.clsParamSpec);
            o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
        @Override
        public Object visit(TypeParamTy.TypeVarTuple node) {
            PythonObject o = createPythonObject(state.clsTypeVarTuple);
            o.setAttribute(AstState.T_F_NAME, visitNonNull(node.name));
            fillSourceRangeAttributes(o, node.getSourceRange());
            return o;
        }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/storage.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Instructions regarding the table content being inspected.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.TableOptions}
*/
public final class TableOptions extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.TableOptions)
TableOptionsOrBuilder {
private static final long serialVersionUID = 0L;
// Use TableOptions.newBuilder() to construct.
  // NOTE: protoc-generated message implementation — keep logic in sync with
  // the generator's output; only the proto comments/javadoc may be touched.
  private TableOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private TableOptions() {
    identifyingFields_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TableOptions();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpStorage
        .internal_static_google_privacy_dlp_v2_TableOptions_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpStorage
        .internal_static_google_privacy_dlp_v2_TableOptions_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.TableOptions.class,
            com.google.privacy.dlp.v2.TableOptions.Builder.class);
  }
  public static final int IDENTIFYING_FIELDS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.privacy.dlp.v2.FieldId> identifyingFields_;
  /**
   *
   *
   * <pre>
   * The columns that are the primary keys for table objects included in
   * ContentItem. A copy of this cell's value will be stored alongside
   * each finding so that the finding can be traced to the specific row it came
   * from. No more than 3 may be provided.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.privacy.dlp.v2.FieldId> getIdentifyingFieldsList() {
    return identifyingFields_;
  }
  /**
   *
   *
   * <pre>
   * The columns that are the primary keys for table objects included in
   * ContentItem. A copy of this cell's value will be stored alongside
   * each finding so that the finding can be traced to the specific row it came
   * from. No more than 3 may be provided.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.privacy.dlp.v2.FieldIdOrBuilder>
      getIdentifyingFieldsOrBuilderList() {
    return identifyingFields_;
  }
  /**
   *
   *
   * <pre>
   * The columns that are the primary keys for table objects included in
   * ContentItem. A copy of this cell's value will be stored alongside
   * each finding so that the finding can be traced to the specific row it came
   * from. No more than 3 may be provided.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
   */
  @java.lang.Override
  public int getIdentifyingFieldsCount() {
    return identifyingFields_.size();
  }
  /**
   *
   *
   * <pre>
   * The columns that are the primary keys for table objects included in
   * ContentItem. A copy of this cell's value will be stored alongside
   * each finding so that the finding can be traced to the specific row it came
   * from. No more than 3 may be provided.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.FieldId getIdentifyingFields(int index) {
    return identifyingFields_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The columns that are the primary keys for table objects included in
   * ContentItem. A copy of this cell's value will be stored alongside
   * each finding so that the finding can be traced to the specific row it came
   * from. No more than 3 may be provided.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.FieldIdOrBuilder getIdentifyingFieldsOrBuilder(int index) {
    return identifyingFields_.get(index);
  }
  // Cached isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < identifyingFields_.size(); i++) {
      output.writeMessage(1, identifyingFields_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < identifyingFields_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, identifyingFields_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.TableOptions)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.TableOptions other = (com.google.privacy.dlp.v2.TableOptions) obj;
    if (!getIdentifyingFieldsList().equals(other.getIdentifyingFieldsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getIdentifyingFieldsCount() > 0) {
      hash = (37 * hash) + IDENTIFYING_FIELDS_FIELD_NUMBER;
      hash = (53 * hash) + getIdentifyingFieldsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER.
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.TableOptions parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.privacy.dlp.v2.TableOptions prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Instructions regarding the table content being inspected.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.TableOptions}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.TableOptions)
com.google.privacy.dlp.v2.TableOptionsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpStorage
          .internal_static_google_privacy_dlp_v2_TableOptions_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpStorage
          .internal_static_google_privacy_dlp_v2_TableOptions_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.TableOptions.class,
              com.google.privacy.dlp.v2.TableOptions.Builder.class);
    }
    // Construct using com.google.privacy.dlp.v2.TableOptions.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both representations of the repeated field: the plain list (used
    // before any nested builder is requested) and the RepeatedFieldBuilderV3.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (identifyingFieldsBuilder_ == null) {
        identifyingFields_ = java.util.Collections.emptyList();
      } else {
        identifyingFields_ = null;
        identifyingFieldsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpStorage
          .internal_static_google_privacy_dlp_v2_TableOptions_descriptor;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.TableOptions getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.TableOptions.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.TableOptions build() {
      com.google.privacy.dlp.v2.TableOptions result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.TableOptions buildPartial() {
      com.google.privacy.dlp.v2.TableOptions result =
          new com.google.privacy.dlp.v2.TableOptions(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes the repeated field into the result; once the list is handed to
    // a message it is wrapped unmodifiable and the "owned" bit is cleared.
    private void buildPartialRepeatedFields(com.google.privacy.dlp.v2.TableOptions result) {
      if (identifyingFieldsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          identifyingFields_ = java.util.Collections.unmodifiableList(identifyingFields_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.identifyingFields_ = identifyingFields_;
      } else {
        result.identifyingFields_ = identifyingFieldsBuilder_.build();
      }
    }
    // Generator artifact: this message has no singular fields, so the copied
    // bitfield is unused here.
    private void buildPartial0(com.google.privacy.dlp.v2.TableOptions result) {
      int from_bitField0_ = bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.TableOptions) {
        return mergeFrom((com.google.privacy.dlp.v2.TableOptions) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Repeated-field merge semantics: adopt the other message's (immutable)
    // list wholesale when ours is empty, otherwise append element-wise.
    public Builder mergeFrom(com.google.privacy.dlp.v2.TableOptions other) {
      if (other == com.google.privacy.dlp.v2.TableOptions.getDefaultInstance()) return this;
      if (identifyingFieldsBuilder_ == null) {
        if (!other.identifyingFields_.isEmpty()) {
          if (identifyingFields_.isEmpty()) {
            identifyingFields_ = other.identifyingFields_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureIdentifyingFieldsIsMutable();
            identifyingFields_.addAll(other.identifyingFields_);
          }
          onChanged();
        }
      } else {
        if (!other.identifyingFields_.isEmpty()) {
          if (identifyingFieldsBuilder_.isEmpty()) {
            identifyingFieldsBuilder_.dispose();
            identifyingFieldsBuilder_ = null;
            identifyingFields_ = other.identifyingFields_;
            bitField0_ = (bitField0_ & ~0x00000001);
            identifyingFieldsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getIdentifyingFieldsFieldBuilder()
                    : null;
          } else {
            identifyingFieldsBuilder_.addAllMessages(other.identifyingFields_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = field 1 (identifying_fields),
    // length-delimited; anything else is preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.privacy.dlp.v2.FieldId m =
                    input.readMessage(
                        com.google.privacy.dlp.v2.FieldId.parser(), extensionRegistry);
                if (identifyingFieldsBuilder_ == null) {
                  ensureIdentifyingFieldsIsMutable();
                  identifyingFields_.add(m);
                } else {
                  identifyingFieldsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.privacy.dlp.v2.FieldId> identifyingFields_ =
java.util.Collections.emptyList();
private void ensureIdentifyingFieldsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
identifyingFields_ =
new java.util.ArrayList<com.google.privacy.dlp.v2.FieldId>(identifyingFields_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.privacy.dlp.v2.FieldId,
com.google.privacy.dlp.v2.FieldId.Builder,
com.google.privacy.dlp.v2.FieldIdOrBuilder>
identifyingFieldsBuilder_;
    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public java.util.List<com.google.privacy.dlp.v2.FieldId> getIdentifyingFieldsList() {
      if (identifyingFieldsBuilder_ == null) {
        // List view is read-only so callers cannot bypass the builder.
        return java.util.Collections.unmodifiableList(identifyingFields_);
      } else {
        return identifyingFieldsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public int getIdentifyingFieldsCount() {
      if (identifyingFieldsBuilder_ == null) {
        return identifyingFields_.size();
      } else {
        return identifyingFieldsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public com.google.privacy.dlp.v2.FieldId getIdentifyingFields(int index) {
      if (identifyingFieldsBuilder_ == null) {
        return identifyingFields_.get(index);
      } else {
        return identifyingFieldsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder setIdentifyingFields(int index, com.google.privacy.dlp.v2.FieldId value) {
      if (identifyingFieldsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIdentifyingFieldsIsMutable();
        identifyingFields_.set(index, value);
        onChanged();
      } else {
        identifyingFieldsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder setIdentifyingFields(
        int index, com.google.privacy.dlp.v2.FieldId.Builder builderForValue) {
      if (identifyingFieldsBuilder_ == null) {
        ensureIdentifyingFieldsIsMutable();
        identifyingFields_.set(index, builderForValue.build());
        onChanged();
      } else {
        identifyingFieldsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder addIdentifyingFields(com.google.privacy.dlp.v2.FieldId value) {
      if (identifyingFieldsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIdentifyingFieldsIsMutable();
        identifyingFields_.add(value);
        onChanged();
      } else {
        identifyingFieldsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder addIdentifyingFields(int index, com.google.privacy.dlp.v2.FieldId value) {
      if (identifyingFieldsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIdentifyingFieldsIsMutable();
        identifyingFields_.add(index, value);
        onChanged();
      } else {
        identifyingFieldsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder addIdentifyingFields(com.google.privacy.dlp.v2.FieldId.Builder builderForValue) {
      if (identifyingFieldsBuilder_ == null) {
        ensureIdentifyingFieldsIsMutable();
        identifyingFields_.add(builderForValue.build());
        onChanged();
      } else {
        identifyingFieldsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder addIdentifyingFields(
        int index, com.google.privacy.dlp.v2.FieldId.Builder builderForValue) {
      if (identifyingFieldsBuilder_ == null) {
        ensureIdentifyingFieldsIsMutable();
        identifyingFields_.add(index, builderForValue.build());
        onChanged();
      } else {
        identifyingFieldsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder addAllIdentifyingFields(
        java.lang.Iterable<? extends com.google.privacy.dlp.v2.FieldId> values) {
      if (identifyingFieldsBuilder_ == null) {
        ensureIdentifyingFieldsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, identifyingFields_);
        onChanged();
      } else {
        identifyingFieldsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder clearIdentifyingFields() {
      if (identifyingFieldsBuilder_ == null) {
        // Revert to the shared immutable empty list and clear the
        // "mutable copy owned" bit.
        identifyingFields_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        identifyingFieldsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public Builder removeIdentifyingFields(int index) {
      if (identifyingFieldsBuilder_ == null) {
        ensureIdentifyingFieldsIsMutable();
        identifyingFields_.remove(index);
        onChanged();
      } else {
        identifyingFieldsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public com.google.privacy.dlp.v2.FieldId.Builder getIdentifyingFieldsBuilder(int index) {
      // Forces creation of the field builder: edits via the returned builder
      // are reflected in this message builder.
      return getIdentifyingFieldsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public com.google.privacy.dlp.v2.FieldIdOrBuilder getIdentifyingFieldsOrBuilder(int index) {
      if (identifyingFieldsBuilder_ == null) {
        return identifyingFields_.get(index);
      } else {
        return identifyingFieldsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public java.util.List<? extends com.google.privacy.dlp.v2.FieldIdOrBuilder>
        getIdentifyingFieldsOrBuilderList() {
      if (identifyingFieldsBuilder_ != null) {
        return identifyingFieldsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(identifyingFields_);
      }
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public com.google.privacy.dlp.v2.FieldId.Builder addIdentifyingFieldsBuilder() {
      return getIdentifyingFieldsFieldBuilder()
          .addBuilder(com.google.privacy.dlp.v2.FieldId.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public com.google.privacy.dlp.v2.FieldId.Builder addIdentifyingFieldsBuilder(int index) {
      return getIdentifyingFieldsFieldBuilder()
          .addBuilder(index, com.google.privacy.dlp.v2.FieldId.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The columns that are the primary keys for table objects included in
     * ContentItem. A copy of this cell's value will be stored alongside each
     * finding so that the finding can be traced to the specific row it came
     * from. No more than 3 may be provided.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.FieldId identifying_fields = 1;</code>
     */
    public java.util.List<com.google.privacy.dlp.v2.FieldId.Builder>
        getIdentifyingFieldsBuilderList() {
      return getIdentifyingFieldsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 for identifying_fields.
    // After creation, identifyingFields_ is nulled out: the field builder
    // becomes the single source of truth for the repeated field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.FieldId,
            com.google.privacy.dlp.v2.FieldId.Builder,
            com.google.privacy.dlp.v2.FieldIdOrBuilder>
        getIdentifyingFieldsFieldBuilder() {
      if (identifyingFieldsBuilder_ == null) {
        identifyingFieldsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.privacy.dlp.v2.FieldId,
                com.google.privacy.dlp.v2.FieldId.Builder,
                com.google.privacy.dlp.v2.FieldIdOrBuilder>(
                identifyingFields_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        identifyingFields_ = null;
      }
      return identifyingFieldsBuilder_;
    }
    // Unknown-field handling is delegated unchanged to the generated
    // superclass; these overrides only narrow the return type to Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.TableOptions)
  }
  // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.TableOptions)
  private static final com.google.privacy.dlp.v2.TableOptions DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.TableOptions();
  }

  public static com.google.privacy.dlp.v2.TableOptions getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by parseFrom()/parseDelimitedFrom(); on any parse failure it
  // attaches the partially built message to the thrown exception.
  private static final com.google.protobuf.Parser<TableOptions> PARSER =
      new com.google.protobuf.AbstractParser<TableOptions>() {
        @java.lang.Override
        public TableOptions parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a protobuf exception.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<TableOptions> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TableOptions> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.privacy.dlp.v2.TableOptions getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 36,154 | jdk/src/share/classes/javax/imageio/metadata/IIOMetadata.java | /*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.imageio.metadata;
import org.w3c.dom.Node;
import java.lang.reflect.Method;
/**
* An abstract class to be extended by objects that represent metadata
* (non-image data) associated with images and streams. Plug-ins
* represent metadata using opaque, plug-in specific objects. These
* objects, however, provide the ability to access their internal
* information as a tree of <code>IIOMetadataNode</code> objects that
* support the XML DOM interfaces as well as additional interfaces for
* storing non-textual data and retrieving information about legal
* data values. The format of such trees is plug-in dependent, but
* plug-ins may choose to support a plug-in neutral format described
* below. A single plug-in may support multiple metadata formats,
 * whose names may be determined by calling
* <code>getMetadataFormatNames</code>. The plug-in may also support
* a single special format, referred to as the "native" format, which
* is designed to encode its metadata losslessly. This format will
* typically be designed specifically to work with a specific file
* format, so that images may be loaded and saved in the same format
* with no loss of metadata, but may be less useful for transferring
* metadata between an <code>ImageReader</code> and an
* <code>ImageWriter</code> for different image formats. To convert
* between two native formats as losslessly as the image file formats
* will allow, an <code>ImageTranscoder</code> object must be used.
*
* @see javax.imageio.ImageReader#getImageMetadata
* @see javax.imageio.ImageReader#getStreamMetadata
* @see javax.imageio.ImageReader#readAll
* @see javax.imageio.ImageWriter#getDefaultStreamMetadata
* @see javax.imageio.ImageWriter#getDefaultImageMetadata
* @see javax.imageio.ImageWriter#write
* @see javax.imageio.ImageWriter#convertImageMetadata
* @see javax.imageio.ImageWriter#convertStreamMetadata
* @see javax.imageio.IIOImage
* @see javax.imageio.ImageTranscoder
*
*/
public abstract class IIOMetadata {
    /**
     * A boolean indicating whether the concrete subclass supports the
     * standard metadata format, set via the constructor.
     */
    protected boolean standardFormatSupported;

    /**
     * The name of the native metadata format for this object,
     * initialized to <code>null</code> and set via the constructor.
     */
    protected String nativeMetadataFormatName = null;

    /**
     * The name of the class implementing <code>IIOMetadataFormat</code>
     * and representing the native metadata format, initialized to
     * <code>null</code> and set via the constructor.
     */
    protected String nativeMetadataFormatClassName = null;

    /**
     * An array of names of formats, other than the standard and
     * native formats, that are supported by this plug-in,
     * initialized to <code>null</code> and set via the constructor.
     */
    protected String[] extraMetadataFormatNames = null;

    /**
     * An array of names of classes implementing <code>IIOMetadataFormat</code>
     * and representing the metadata formats, other than the standard and
     * native formats, that are supported by this plug-in,
     * initialized to <code>null</code> and set via the constructor.
     */
    protected String[] extraMetadataFormatClassNames = null;

    /**
     * An <code>IIOMetadataController</code> that is suggested for use
     * as the controller for this <code>IIOMetadata</code> object.  It
     * may be retrieved via <code>getDefaultController</code>.  To
     * install the default controller, call
     * <code>setController(getDefaultController())</code>.  This
     * instance variable should be set by subclasses that choose to
     * provide their own default controller, usually a GUI, for
     * setting parameters.
     *
     * @see IIOMetadataController
     * @see #getDefaultController
     */
    protected IIOMetadataController defaultController = null;

    /**
     * The <code>IIOMetadataController</code> that will be
     * used to provide settings for this <code>IIOMetadata</code>
     * object when the <code>activateController</code> method
     * is called.  This value overrides any default controller,
     * even when <code>null</code>.
     *
     * @see IIOMetadataController
     * @see #setController(IIOMetadataController)
     * @see #hasController()
     * @see #activateController()
     */
    protected IIOMetadataController controller = null;

    /**
     * Constructs an empty <code>IIOMetadata</code> object.  The
     * subclass is responsible for supplying values for all protected
     * instance variables that will allow any non-overridden default
     * implementations of methods to satisfy their contracts.  For example,
     * <code>extraMetadataFormatNames</code> should not have length 0.
     */
    protected IIOMetadata() {} // intentionally empty: subclasses fill in the protected fields
/**
* Constructs an <code>IIOMetadata</code> object with the given
* format names and format class names, as well as a boolean
* indicating whether the standard format is supported.
*
* <p> This constructor does not attempt to check the class names
* for validity. Invalid class names may cause exceptions in
* subsequent calls to <code>getMetadataFormat</code>.
*
* @param standardMetadataFormatSupported <code>true</code> if
* this object can return or accept a DOM tree using the standard
* metadata format.
* @param nativeMetadataFormatName the name of the native metadata
* format, as a <code>String</code>, or <code>null</code> if there
* is no native format.
* @param nativeMetadataFormatClassName the name of the class of
* the native metadata format, or <code>null</code> if there is
* no native format.
* @param extraMetadataFormatNames an array of <code>String</code>s
* indicating additional formats supported by this object, or
* <code>null</code> if there are none.
* @param extraMetadataFormatClassNames an array of <code>String</code>s
* indicating the class names of any additional formats supported by
* this object, or <code>null</code> if there are none.
*
* @exception IllegalArgumentException if
* <code>extraMetadataFormatNames</code> has length 0.
* @exception IllegalArgumentException if
* <code>extraMetadataFormatNames</code> and
* <code>extraMetadataFormatClassNames</code> are neither both
* <code>null</code>, nor of the same length.
*/
protected IIOMetadata(boolean standardMetadataFormatSupported,
String nativeMetadataFormatName,
String nativeMetadataFormatClassName,
String[] extraMetadataFormatNames,
String[] extraMetadataFormatClassNames) {
this.standardFormatSupported = standardMetadataFormatSupported;
this.nativeMetadataFormatName = nativeMetadataFormatName;
this.nativeMetadataFormatClassName = nativeMetadataFormatClassName;
if (extraMetadataFormatNames != null) {
if (extraMetadataFormatNames.length == 0) {
throw new IllegalArgumentException
("extraMetadataFormatNames.length == 0!");
}
if (extraMetadataFormatClassNames == null) {
throw new IllegalArgumentException
("extraMetadataFormatNames != null && extraMetadataFormatClassNames == null!");
}
if (extraMetadataFormatClassNames.length !=
extraMetadataFormatNames.length) {
throw new IllegalArgumentException
("extraMetadataFormatClassNames.length != extraMetadataFormatNames.length!");
}
this.extraMetadataFormatNames =
(String[]) extraMetadataFormatNames.clone();
this.extraMetadataFormatClassNames =
(String[]) extraMetadataFormatClassNames.clone();
} else {
if (extraMetadataFormatClassNames != null) {
throw new IllegalArgumentException
("extraMetadataFormatNames == null && extraMetadataFormatClassNames != null!");
}
}
}
    /**
     * Returns <code>true</code> if the standard metadata format is
     * supported by <code>getMetadataFormat</code>,
     * <code>getAsTree</code>, <code>setFromTree</code>, and
     * <code>mergeTree</code>.
     *
     * <p> The default implementation returns the value of the
     * <code>standardFormatSupported</code> instance variable.
     *
     * @return <code>true</code> if the standard metadata format
     * is supported.
     *
     * @see #getAsTree
     * @see #setFromTree
     * @see #mergeTree
     * @see #getMetadataFormat
     */
    public boolean isStandardMetadataFormatSupported() {
        return standardFormatSupported;
    }

    /**
     * Returns <code>true</code> if this object does not support the
     * <code>mergeTree</code>, <code>setFromTree</code>, and
     * <code>reset</code> methods.
     *
     * @return true if this <code>IIOMetadata</code> object cannot be
     * modified.
     */
    public abstract boolean isReadOnly();

    /**
     * Returns the name of the "native" metadata format for this
     * plug-in, which typically allows for lossless encoding and
     * transmission of the metadata stored in the format handled by
     * this plug-in.  If no such format is supported,
     * <code>null</code> will be returned.
     *
     * <p> The structure and contents of the "native" metadata format
     * are defined by the plug-in that created this
     * <code>IIOMetadata</code> object.  Plug-ins for simple formats
     * will usually create a dummy node for the root, and then a
     * series of child nodes representing individual tags, chunks, or
     * keyword/value pairs.  A plug-in may choose whether or not to
     * document its native format.
     *
     * <p> The default implementation returns the value of the
     * <code>nativeMetadataFormatName</code> instance variable.
     *
     * @return the name of the native format, or <code>null</code>.
     *
     * @see #getExtraMetadataFormatNames
     * @see #getMetadataFormatNames
     */
    public String getNativeMetadataFormatName() {
        return nativeMetadataFormatName;
    }
/**
* Returns an array of <code>String</code>s containing the names
* of additional metadata formats, other than the native and standard
* formats, recognized by this plug-in's
* <code>getAsTree</code>, <code>setFromTree</code>, and
* <code>mergeTree</code> methods. If there are no such additional
* formats, <code>null</code> is returned.
*
* <p> The default implementation returns a clone of the
* <code>extraMetadataFormatNames</code> instance variable.
*
* @return an array of <code>String</code>s with length at least
* 1, or <code>null</code>.
*
* @see #getAsTree
* @see #setFromTree
* @see #mergeTree
* @see #getNativeMetadataFormatName
* @see #getMetadataFormatNames
*/
public String[] getExtraMetadataFormatNames() {
if (extraMetadataFormatNames == null) {
return null;
}
return (String[])extraMetadataFormatNames.clone();
}
/**
* Returns an array of <code>String</code>s containing the names
* of all metadata formats, including the native and standard
* formats, recognized by this plug-in's <code>getAsTree</code>,
* <code>setFromTree</code>, and <code>mergeTree</code> methods.
* If there are no such formats, <code>null</code> is returned.
*
* <p> The default implementation calls
* <code>getNativeMetadataFormatName</code>,
* <code>isStandardMetadataFormatSupported</code>, and
* <code>getExtraMetadataFormatNames</code> and returns the
* combined results.
*
* @return an array of <code>String</code>s.
*
* @see #getNativeMetadataFormatName
* @see #isStandardMetadataFormatSupported
* @see #getExtraMetadataFormatNames
*/
public String[] getMetadataFormatNames() {
String nativeName = getNativeMetadataFormatName();
String standardName = isStandardMetadataFormatSupported() ?
IIOMetadataFormatImpl.standardMetadataFormatName : null;
String[] extraNames = getExtraMetadataFormatNames();
int numFormats = 0;
if (nativeName != null) {
++numFormats;
}
if (standardName != null) {
++numFormats;
}
if (extraNames != null) {
numFormats += extraNames.length;
}
if (numFormats == 0) {
return null;
}
String[] formats = new String[numFormats];
int index = 0;
if (nativeName != null) {
formats[index++] = nativeName;
}
if (standardName != null) {
formats[index++] = standardName;
}
if (extraNames != null) {
for (int i = 0; i < extraNames.length; i++) {
formats[index++] = extraNames[i];
}
}
return formats;
}
/**
* Returns an <code>IIOMetadataFormat</code> object describing the
* given metadata format, or <code>null</code> if no description
* is available. The supplied name must be one of those returned
* by <code>getMetadataFormatNames</code> (<i>i.e.</i>, either the
* native format name, the standard format name, or one of those
* returned by <code>getExtraMetadataFormatNames</code>).
*
* <p> The default implementation checks the name against the
* global standard metadata format name, and returns that format
* if it is supported. Otherwise, it checks against the native
* format names followed by any additional format names. If a
* match is found, it retrieves the name of the
* <code>IIOMetadataFormat</code> class from
* <code>nativeMetadataFormatClassName</code> or
* <code>extraMetadataFormatClassNames</code> as appropriate, and
* constructs an instance of that class using its
* <code>getInstance</code> method.
*
* @param formatName the desired metadata format.
*
* @return an <code>IIOMetadataFormat</code> object.
*
* @exception IllegalArgumentException if <code>formatName</code>
* is <code>null</code> or is not one of the names recognized by
* the plug-in.
* @exception IllegalStateException if the class corresponding to
* the format name cannot be loaded.
*/
public IIOMetadataFormat getMetadataFormat(String formatName) {
if (formatName == null) {
throw new IllegalArgumentException("formatName == null!");
}
if (standardFormatSupported
&& formatName.equals
(IIOMetadataFormatImpl.standardMetadataFormatName)) {
return IIOMetadataFormatImpl.getStandardFormatInstance();
}
String formatClassName = null;
if (formatName.equals(nativeMetadataFormatName)) {
formatClassName = nativeMetadataFormatClassName;
} else if (extraMetadataFormatNames != null) {
for (int i = 0; i < extraMetadataFormatNames.length; i++) {
if (formatName.equals(extraMetadataFormatNames[i])) {
formatClassName = extraMetadataFormatClassNames[i];
break; // out of for
}
}
}
if (formatClassName == null) {
throw new IllegalArgumentException("Unsupported format name");
}
try {
Class cls = null;
final Object o = this;
// firstly we try to use classloader used for loading
// the IIOMetadata implemantation for this plugin.
ClassLoader loader = (ClassLoader)
java.security.AccessController.doPrivileged(
new java.security.PrivilegedAction() {
public Object run() {
return o.getClass().getClassLoader();
}
});
try {
cls = Class.forName(formatClassName, true,
loader);
} catch (ClassNotFoundException e) {
// we failed to load IIOMetadataFormat class by
// using IIOMetadata classloader.Next try is to
// use thread context classloader.
loader = (ClassLoader)
java.security.AccessController.doPrivileged(
new java.security.PrivilegedAction() {
public Object run() {
return Thread.currentThread().getContextClassLoader();
}
});
try {
cls = Class.forName(formatClassName, true,
loader);
} catch (ClassNotFoundException e1) {
// finally we try to use system classloader in case
// if we failed to load IIOMetadataFormat implementation
// class above.
cls = Class.forName(formatClassName, true,
ClassLoader.getSystemClassLoader());
}
}
Method meth = cls.getMethod("getInstance");
return (IIOMetadataFormat) meth.invoke(null);
} catch (Exception e) {
RuntimeException ex =
new IllegalStateException ("Can't obtain format");
ex.initCause(e);
throw ex;
}
}
    /**
     * Returns an XML DOM <code>Node</code> object that represents the
     * root of a tree of metadata contained within this object
     * according to the conventions defined by a given metadata
     * format.
     *
     * <p> The names of the available metadata formats may be queried
     * using the <code>getMetadataFormatNames</code> method.
     *
     * @param formatName the desired metadata format.
     *
     * @return an XML DOM <code>Node</code> object forming the
     * root of a tree.
     *
     * @exception IllegalArgumentException if <code>formatName</code>
     * is <code>null</code> or is not one of the names returned by
     * <code>getMetadataFormatNames</code>.
     *
     * @see #getMetadataFormatNames
     * @see #getMetadataFormat
     * @see #setFromTree
     * @see #mergeTree
     */
    public abstract Node getAsTree(String formatName);

    /**
     * Alters the internal state of this <code>IIOMetadata</code>
     * object from a tree of XML DOM <code>Node</code>s whose syntax
     * is defined by the given metadata format.  The previous state is
     * altered only as necessary to accommodate the nodes that are
     * present in the given tree.  If the tree structure or contents
     * are invalid, an <code>IIOInvalidTreeException</code> will be
     * thrown.
     *
     * <p> As the semantics of how a tree or subtree may be merged with
     * another tree are completely format-specific, plug-in authors may
     * implement this method in whatever manner is most appropriate for
     * the format, including simply replacing all existing state with the
     * contents of the given tree.
     *
     * @param formatName the desired metadata format.
     * @param root an XML DOM <code>Node</code> object forming the
     * root of a tree.
     *
     * @exception IllegalStateException if this object is read-only.
     * @exception IllegalArgumentException if <code>formatName</code>
     * is <code>null</code> or is not one of the names returned by
     * <code>getMetadataFormatNames</code>.
     * @exception IllegalArgumentException if <code>root</code> is
     * <code>null</code>.
     * @exception IIOInvalidTreeException if the tree cannot be parsed
     * successfully using the rules of the given format.
     *
     * @see #getMetadataFormatNames
     * @see #getAsTree
     * @see #setFromTree
     */
    public abstract void mergeTree(String formatName, Node root)
        throws IIOInvalidTreeException;
    /**
     * Returns an <code>IIOMetadataNode</code> representing the chroma
     * information of the standard <code>javax_imageio_1.0</code>
     * metadata format, or <code>null</code> if no such information is
     * available.  This method is intended to be called by the utility
     * routine <code>getStandardTree</code>.
     *
     * <p> The default implementation returns <code>null</code>.
     *
     * <p> Subclasses should override this method to produce an
     * appropriate subtree if they wish to support the standard
     * metadata format.
     *
     * @return an <code>IIOMetadataNode</code>, or <code>null</code>.
     *
     * @see #getStandardTree
     */
    protected IIOMetadataNode getStandardChromaNode() {
        return null; // no chroma information by default
    }

    /**
     * Returns an <code>IIOMetadataNode</code> representing the
     * compression information of the standard
     * <code>javax_imageio_1.0</code> metadata format, or
     * <code>null</code> if no such information is available.  This
     * method is intended to be called by the utility routine
     * <code>getStandardTree</code>.
     *
     * <p> The default implementation returns <code>null</code>.
     *
     * <p> Subclasses should override this method to produce an
     * appropriate subtree if they wish to support the standard
     * metadata format.
     *
     * @return an <code>IIOMetadataNode</code>, or <code>null</code>.
     *
     * @see #getStandardTree
     */
    protected IIOMetadataNode getStandardCompressionNode() {
        return null; // no compression information by default
    }

    /**
     * Returns an <code>IIOMetadataNode</code> representing the data
     * format information of the standard
     * <code>javax_imageio_1.0</code> metadata format, or
     * <code>null</code> if no such information is available.  This
     * method is intended to be called by the utility routine
     * <code>getStandardTree</code>.
     *
     * <p> The default implementation returns <code>null</code>.
     *
     * <p> Subclasses should override this method to produce an
     * appropriate subtree if they wish to support the standard
     * metadata format.
     *
     * @return an <code>IIOMetadataNode</code>, or <code>null</code>.
     *
     * @see #getStandardTree
     */
    protected IIOMetadataNode getStandardDataNode() {
        return null; // no data-format information by default
    }
/**
* Returns an <code>IIOMetadataNode</code> representing the
* dimension information of the standard
* <code>javax_imageio_1.0</code> metadata format, or
* <code>null</code> if no such information is available. This
* method is intended to be called by the utility routine
* <code>getStandardTree</code>.
*
* <p> The default implementation returns <code>null</code>.
*
* <p> Subclasses should override this method to produce an
* appropriate subtree if they wish to support the standard
* metadata format.
*
* @return an <code>IIOMetadataNode</code>, or <code>null</code>.
*
* @see #getStandardTree
*/
protected IIOMetadataNode getStandardDimensionNode() {
return null;
}
/**
* Returns an <code>IIOMetadataNode</code> representing the document
* information of the standard <code>javax_imageio_1.0</code>
* metadata format, or <code>null</code> if no such information is
* available. This method is intended to be called by the utility
* routine <code>getStandardTree</code>.
*
* <p> The default implementation returns <code>null</code>.
*
* <p> Subclasses should override this method to produce an
* appropriate subtree if they wish to support the standard
* metadata format.
*
* @return an <code>IIOMetadataNode</code>, or <code>null</code>.
*
* @see #getStandardTree
*/
protected IIOMetadataNode getStandardDocumentNode() {
return null;
}
/**
* Returns an <code>IIOMetadataNode</code> representing the textual
* information of the standard <code>javax_imageio_1.0</code>
* metadata format, or <code>null</code> if no such information is
* available. This method is intended to be called by the utility
* routine <code>getStandardTree</code>.
*
* <p> The default implementation returns <code>null</code>.
*
* <p> Subclasses should override this method to produce an
* appropriate subtree if they wish to support the standard
* metadata format.
*
* @return an <code>IIOMetadataNode</code>, or <code>null</code>.
*
* @see #getStandardTree
*/
protected IIOMetadataNode getStandardTextNode() {
return null;
}
/**
* Returns an <code>IIOMetadataNode</code> representing the tiling
* information of the standard <code>javax_imageio_1.0</code>
* metadata format, or <code>null</code> if no such information is
* available. This method is intended to be called by the utility
* routine <code>getStandardTree</code>.
*
* <p> The default implementation returns <code>null</code>.
*
* <p> Subclasses should override this method to produce an
* appropriate subtree if they wish to support the standard
* metadata format.
*
* @return an <code>IIOMetadataNode</code>, or <code>null</code>.
*
* @see #getStandardTree
*/
protected IIOMetadataNode getStandardTileNode() {
return null;
}
/**
* Returns an <code>IIOMetadataNode</code> representing the
* transparency information of the standard
* <code>javax_imageio_1.0</code> metadata format, or
* <code>null</code> if no such information is available. This
* method is intended to be called by the utility routine
* <code>getStandardTree</code>.
*
* <p> The default implementation returns <code>null</code>.
*
* <p> Subclasses should override this method to produce an
* appropriate subtree if they wish to support the standard
* metadata format.
*
* @return an <code>IIOMetadataNode</code>, or <code>null</code>.
*/
protected IIOMetadataNode getStandardTransparencyNode() {
return null;
}
/**
* Appends a new node to an existing node, if the new node is
* non-<code>null</code>.
*/
private void append(IIOMetadataNode root, IIOMetadataNode node) {
if (node != null) {
root.appendChild(node);
}
}
/**
* A utility method to return a tree of
* <code>IIOMetadataNode</code>s representing the metadata
* contained within this object according to the conventions of
* the standard <code>javax_imageio_1.0</code> metadata format.
*
* <p> This method calls the various <code>getStandard*Node</code>
* methods to supply each of the subtrees rooted at the children
* of the root node. If any of those methods returns
* <code>null</code>, the corresponding subtree will be omitted.
* If all of them return <code>null</code>, a tree consisting of a
* single root node will be returned.
*
* @return an <code>IIOMetadataNode</code> representing the root
* of a metadata tree in the <code>javax_imageio_1.0</code>
* format.
*
* @see #getStandardChromaNode
* @see #getStandardCompressionNode
* @see #getStandardDataNode
* @see #getStandardDimensionNode
* @see #getStandardDocumentNode
* @see #getStandardTextNode
* @see #getStandardTileNode
* @see #getStandardTransparencyNode
*/
protected final IIOMetadataNode getStandardTree() {
IIOMetadataNode root = new IIOMetadataNode
(IIOMetadataFormatImpl.standardMetadataFormatName);
append(root, getStandardChromaNode());
append(root, getStandardCompressionNode());
append(root, getStandardDataNode());
append(root, getStandardDimensionNode());
append(root, getStandardDocumentNode());
append(root, getStandardTextNode());
append(root, getStandardTileNode());
append(root, getStandardTransparencyNode());
return root;
}
/**
* Sets the internal state of this <code>IIOMetadata</code> object
* from a tree of XML DOM <code>Node</code>s whose syntax is
* defined by the given metadata format. The previous state is
* discarded. If the tree's structure or contents are invalid, an
* <code>IIOInvalidTreeException</code> will be thrown.
*
* <p> The default implementation calls <code>reset</code>
* followed by <code>mergeTree(formatName, root)</code>.
*
* @param formatName the desired metadata format.
* @param root an XML DOM <code>Node</code> object forming the
* root of a tree.
*
* @exception IllegalStateException if this object is read-only.
* @exception IllegalArgumentException if <code>formatName</code>
* is <code>null</code> or is not one of the names returned by
* <code>getMetadataFormatNames</code>.
* @exception IllegalArgumentException if <code>root</code> is
* <code>null</code>.
* @exception IIOInvalidTreeException if the tree cannot be parsed
* successfully using the rules of the given format.
*
* @see #getMetadataFormatNames
* @see #getAsTree
* @see #mergeTree
*/
public void setFromTree(String formatName, Node root)
throws IIOInvalidTreeException {
reset();
mergeTree(formatName, root);
}
    /**
     * Resets all the data stored in this object to default values,
     * usually to the state this object was in immediately after
     * construction, though the precise semantics are plug-in specific.
     * Note that there are many possible default values, depending on
     * how the object was created.
     *
     * <p> Called by the default {@code setFromTree} implementation
     * before merging the new tree.
     *
     * @throws IllegalStateException if this object is read-only.
     *
     * @see javax.imageio.ImageReader#getStreamMetadata
     * @see javax.imageio.ImageReader#getImageMetadata
     * @see javax.imageio.ImageWriter#getDefaultStreamMetadata
     * @see javax.imageio.ImageWriter#getDefaultImageMetadata
     */
    public abstract void reset();
/**
* Sets the <code>IIOMetadataController</code> to be used
* to provide settings for this <code>IIOMetadata</code>
* object when the <code>activateController</code> method
* is called, overriding any default controller. If the
* argument is <code>null</code>, no controller will be
* used, including any default. To restore the default, use
* <code>setController(getDefaultController())</code>.
*
* <p> The default implementation sets the <code>controller</code>
* instance variable to the supplied value.
*
* @param controller An appropriate
* <code>IIOMetadataController</code>, or <code>null</code>.
*
* @see IIOMetadataController
* @see #getController
* @see #getDefaultController
* @see #hasController
* @see #activateController()
*/
public void setController(IIOMetadataController controller) {
this.controller = controller;
}
/**
* Returns whatever <code>IIOMetadataController</code> is currently
* installed. This could be the default if there is one,
* <code>null</code>, or the argument of the most recent call
* to <code>setController</code>.
*
* <p> The default implementation returns the value of the
* <code>controller</code> instance variable.
*
* @return the currently installed
* <code>IIOMetadataController</code>, or <code>null</code>.
*
* @see IIOMetadataController
* @see #setController
* @see #getDefaultController
* @see #hasController
* @see #activateController()
*/
public IIOMetadataController getController() {
return controller;
}
/**
* Returns the default <code>IIOMetadataController</code>, if there
* is one, regardless of the currently installed controller. If
* there is no default controller, returns <code>null</code>.
*
* <p> The default implementation returns the value of the
* <code>defaultController</code> instance variable.
*
* @return the default <code>IIOMetadataController</code>, or
* <code>null</code>.
*
* @see IIOMetadataController
* @see #setController(IIOMetadataController)
* @see #getController
* @see #hasController
* @see #activateController()
*/
public IIOMetadataController getDefaultController() {
return defaultController;
}
/**
* Returns <code>true</code> if there is a controller installed
* for this <code>IIOMetadata</code> object.
*
* <p> The default implementation returns <code>true</code> if the
* <code>getController</code> method returns a
* non-<code>null</code> value.
*
* @return <code>true</code> if a controller is installed.
*
* @see IIOMetadataController
* @see #setController(IIOMetadataController)
* @see #getController
* @see #getDefaultController
* @see #activateController()
*/
public boolean hasController() {
return (getController() != null);
}
/**
* Activates the installed <code>IIOMetadataController</code> for
* this <code>IIOMetadata</code> object and returns the resulting
* value. When this method returns <code>true</code>, all values for this
* <code>IIOMetadata</code> object will be ready for the next write
* operation. If <code>false</code> is
* returned, no settings in this object will have been disturbed
* (<i>i.e.</i>, the user canceled the operation).
*
* <p> Ordinarily, the controller will be a GUI providing a user
* interface for a subclass of <code>IIOMetadata</code> for a
* particular plug-in. Controllers need not be GUIs, however.
*
* <p> The default implementation calls <code>getController</code>
* and the calls <code>activate</code> on the returned object if
* <code>hasController</code> returns <code>true</code>.
*
* @return <code>true</code> if the controller completed normally.
*
* @exception IllegalStateException if there is no controller
* currently installed.
*
* @see IIOMetadataController
* @see #setController(IIOMetadataController)
* @see #getController
* @see #getDefaultController
* @see #hasController
*/
public boolean activateController() {
if (!hasController()) {
throw new IllegalStateException("hasController() == false!");
}
return getController().activate(this);
}
}
|
apache/flink | 36,090 | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.internal;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.AggregatedTable;
import org.apache.flink.table.api.ApiExpression;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.ExplainFormat;
import org.apache.flink.table.api.FlatAggregateTable;
import org.apache.flink.table.api.GroupWindow;
import org.apache.flink.table.api.GroupWindowedTable;
import org.apache.flink.table.api.GroupedTable;
import org.apache.flink.table.api.OverWindow;
import org.apache.flink.table.api.OverWindowedTable;
import org.apache.flink.table.api.PartitionedTable;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TablePipeline;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.api.WindowGroupedTable;
import org.apache.flink.table.catalog.ContextResolvedTable;
import org.apache.flink.table.catalog.FunctionLookup;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.SchemaTranslator;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.expressions.ApiExpressionUtils;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.expressions.UnresolvedReferenceExpression;
import org.apache.flink.table.expressions.resolver.LookupCallResolver;
import org.apache.flink.table.functions.BuiltInFunctionDefinitions;
import org.apache.flink.table.functions.TemporalTableFunction;
import org.apache.flink.table.functions.TemporalTableFunctionImpl;
import org.apache.flink.table.functions.UserDefinedFunction;
import org.apache.flink.table.operations.JoinQueryOperation.JoinType;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.SinkModifyOperation;
import org.apache.flink.table.operations.utils.OperationExpressionsUtils;
import org.apache.flink.table.operations.utils.OperationExpressionsUtils.CategorizedExpressions;
import org.apache.flink.table.operations.utils.OperationTreeBuilder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.flink.table.api.Expressions.lit;
/** Implementation for {@link Table}. */
@Internal
public class TableImpl implements Table {
    // Process-wide counter used by toString() to generate unique view names.
    private static final AtomicInteger uniqueId = new AtomicInteger(0);
    // Environment this table is bound to; all operations are executed against it.
    private final TableEnvironmentInternal tableEnvironment;
    // Root of the relational operation tree that defines this table.
    private final QueryOperation operationTree;
    // Builder used to derive new operation trees from this one.
    private final OperationTreeBuilder operationTreeBuilder;
    // Resolves unresolved function calls within expressions.
    private final LookupCallResolver lookupResolver;
    // Lazily assigned when toString() registers this table as a temporary view.
    private String tableName = null;
    /** Returns the {@link TableEnvironment} this table is bound to. */
    public TableEnvironment getTableEnvironment() {
        return tableEnvironment;
    }
    /**
     * Private constructor; instances are created via {@link #createTable}.
     *
     * @param tableEnvironment environment the table belongs to
     * @param operationTree operation tree defining the table's data
     * @param operationTreeBuilder builder for deriving new operation trees
     * @param lookupResolver resolver for function calls in expressions
     */
    private TableImpl(
            TableEnvironmentInternal tableEnvironment,
            QueryOperation operationTree,
            OperationTreeBuilder operationTreeBuilder,
            LookupCallResolver lookupResolver) {
        this.tableEnvironment = tableEnvironment;
        this.operationTree = operationTree;
        this.operationTreeBuilder = operationTreeBuilder;
        this.lookupResolver = lookupResolver;
    }
public static TableImpl createTable(
TableEnvironmentInternal tableEnvironment,
QueryOperation operationTree,
OperationTreeBuilder operationTreeBuilder,
FunctionLookup functionLookup) {
return new TableImpl(
tableEnvironment,
operationTree,
operationTreeBuilder,
new LookupCallResolver(functionLookup));
}
    // Schema is derived from the underlying operation tree.
    @Override
    public ResolvedSchema getResolvedSchema() {
        return operationTree.getResolvedSchema();
    }
    // Prints the resolved schema to standard output.
    @Override
    public void printSchema() {
        System.out.println(getResolvedSchema());
    }
    @Override
    public QueryOperation getQueryOperation() {
        return operationTree;
    }
    /**
     * Performs a projection. Expressions are split into aggregations and plain projections;
     * if any aggregation is present, a global (key-less) aggregate is inserted below the
     * projection. Window properties are rejected because this table is not windowed.
     */
    @Override
    public Table select(Expression... fields) {
        List<Expression> expressionsWithResolvedCalls = preprocessExpressions(fields);
        CategorizedExpressions extracted =
                OperationExpressionsUtils.extractAggregationsAndProperties(
                        expressionsWithResolvedCalls);
        if (!extracted.getWindowProperties().isEmpty()) {
            throw new ValidationException("Window properties can only be used on windowed tables.");
        }
        if (!extracted.getAggregations().isEmpty()) {
            // Aggregate globally (no group keys) first, then project the remaining expressions.
            QueryOperation aggregate =
                    operationTreeBuilder.aggregate(
                            Collections.emptyList(), extracted.getAggregations(), operationTree);
            return createTable(
                    operationTreeBuilder.project(extracted.getProjections(), aggregate, false));
        } else {
            // Pure projection without aggregations.
            return createTable(
                    operationTreeBuilder.project(
                            expressionsWithResolvedCalls, operationTree, false));
        }
    }
@Override
public TemporalTableFunction createTemporalTableFunction(
Expression timeAttribute, Expression primaryKey) {
Expression resolvedTimeAttribute =
operationTreeBuilder.resolveExpression(timeAttribute, operationTree);
Expression resolvedPrimaryKey =
operationTreeBuilder.resolveExpression(primaryKey, operationTree);
return TemporalTableFunctionImpl.create(
operationTree, resolvedTimeAttribute, resolvedPrimaryKey);
}
@Override
public Table as(String field, String... fields) {
final List<Expression> fieldsExprs = new ArrayList<>();
fieldsExprs.add(lit(field));
for (String extraField : fields) {
fieldsExprs.add(lit(extraField));
}
return createTable(operationTreeBuilder.alias(fieldsExprs, operationTree));
}
@Override
public Table as(Expression... fields) {
return createTable(operationTreeBuilder.alias(Arrays.asList(fields), operationTree));
}
@Override
public Table filter(Expression predicate) {
Expression resolvedCallPredicate = predicate.accept(lookupResolver);
return createTable(operationTreeBuilder.filter(resolvedCallPredicate, operationTree));
}
@Override
public Table where(Expression predicate) {
return filter(predicate);
}
    /** Groups this table by the given keys; aggregation happens in the returned table. */
    @Override
    public GroupedTable groupBy(Expression... fields) {
        return new GroupedTableImpl(this, Arrays.asList(fields));
    }
    /** Removes duplicate rows. */
    @Override
    public Table distinct() {
        return createTable(operationTreeBuilder.distinct(operationTree));
    }
    // Inner join without an explicit predicate.
    @Override
    public Table join(Table right) {
        return joinInternal(right, Optional.empty(), JoinType.INNER);
    }
    @Override
    public Table join(Table right, Expression joinPredicate) {
        return joinInternal(right, Optional.of(joinPredicate), JoinType.INNER);
    }
    // Left outer join without an explicit predicate.
    @Override
    public Table leftOuterJoin(Table right) {
        return joinInternal(right, Optional.empty(), JoinType.LEFT_OUTER);
    }
    @Override
    public Table leftOuterJoin(Table right, Expression joinPredicate) {
        return joinInternal(right, Optional.of(joinPredicate), JoinType.LEFT_OUTER);
    }
    // Right and full outer joins always require a predicate.
    @Override
    public Table rightOuterJoin(Table right, Expression joinPredicate) {
        return joinInternal(right, Optional.of(joinPredicate), JoinType.RIGHT_OUTER);
    }
    @Override
    public Table fullOuterJoin(Table right, Expression joinPredicate) {
        return joinInternal(right, Optional.of(joinPredicate), JoinType.FULL_OUTER);
    }
private TableImpl joinInternal(
Table right, Optional<Expression> joinPredicate, JoinType joinType) {
checkCommonTableEnvironment(right);
return createTable(
operationTreeBuilder.join(
this.operationTree,
right.getQueryOperation(),
joinType,
joinPredicate,
false));
}
    // Inner lateral join against a table function call, without a predicate.
    @Override
    public Table joinLateral(Expression tableFunctionCall) {
        return joinLateralInternal(tableFunctionCall, Optional.empty(), JoinType.INNER);
    }
    @Override
    public Table joinLateral(Expression tableFunctionCall, Expression joinPredicate) {
        return joinLateralInternal(tableFunctionCall, Optional.of(joinPredicate), JoinType.INNER);
    }
    // Left outer lateral join, without a predicate.
    @Override
    public Table leftOuterJoinLateral(Expression tableFunctionCall) {
        return joinLateralInternal(tableFunctionCall, Optional.empty(), JoinType.LEFT_OUTER);
    }
    @Override
    public Table leftOuterJoinLateral(Expression tableFunctionCall, Expression joinPredicate) {
        return joinLateralInternal(
                tableFunctionCall, Optional.of(joinPredicate), JoinType.LEFT_OUTER);
    }
private TableImpl joinLateralInternal(
Expression callExpr, Optional<Expression> joinPredicate, JoinType joinType) {
// check join type
if (joinType != JoinType.INNER && joinType != JoinType.LEFT_OUTER) {
throw new ValidationException(
"Table functions are currently only supported for inner and left outer lateral joins.");
}
return createTable(
operationTreeBuilder.joinLateral(
this.operationTree, callExpr, joinType, joinPredicate));
}
    // Set difference with duplicate elimination (boolean flag 'all' = false).
    @Override
    public Table minus(Table right) {
        checkCommonTableEnvironment(right);
        return createTable(
                operationTreeBuilder.minus(operationTree, right.getQueryOperation(), false));
    }
    // Set difference keeping duplicates (all = true).
    @Override
    public Table minusAll(Table right) {
        checkCommonTableEnvironment(right);
        return createTable(
                operationTreeBuilder.minus(operationTree, right.getQueryOperation(), true));
    }
    // Union with duplicate elimination.
    @Override
    public Table union(Table right) {
        checkCommonTableEnvironment(right);
        return createTable(
                operationTreeBuilder.union(operationTree, right.getQueryOperation(), false));
    }
    // Union keeping duplicates.
    @Override
    public Table unionAll(Table right) {
        checkCommonTableEnvironment(right);
        return createTable(
                operationTreeBuilder.union(operationTree, right.getQueryOperation(), true));
    }
    // Intersection with duplicate elimination.
    @Override
    public Table intersect(Table right) {
        checkCommonTableEnvironment(right);
        return createTable(
                operationTreeBuilder.intersect(operationTree, right.getQueryOperation(), false));
    }
    // Intersection keeping duplicates.
    @Override
    public Table intersectAll(Table right) {
        checkCommonTableEnvironment(right);
        return createTable(
                operationTreeBuilder.intersect(operationTree, right.getQueryOperation(), true));
    }
    /** Sorts this table by the given sort expressions. */
    @Override
    public Table orderBy(Expression... fields) {
        return createTable(operationTreeBuilder.sort(Arrays.asList(fields), operationTree));
    }
@Override
public Table offset(int offset) {
return createTable(operationTreeBuilder.limitWithOffset(offset, operationTree));
}
    /**
     * Limits the result to at most {@code fetch} records.
     *
     * @param fetch maximum number of records to return; must be non-negative
     * @throws ValidationException if {@code fetch} is negative
     */
    @Override
    public Table fetch(int fetch) {
        if (fetch < 0) {
            throw new ValidationException("FETCH count must be equal or larger than 0.");
        }
        return createTable(operationTreeBuilder.limitWithFetch(fetch, operationTree));
    }
    /** Defines a group window over this table. */
    @Override
    public GroupWindowedTable window(GroupWindow groupWindow) {
        return new GroupWindowedTableImpl(this, groupWindow);
    }
    /** Defines over windows; currently restricted to exactly one window. */
    @Override
    public OverWindowedTable window(OverWindow... overWindows) {
        if (overWindows.length != 1) {
            throw new TableException("Currently, only a single over window is supported.");
        }
        return new OverWindowedTableImpl(this, Arrays.asList(overWindows));
    }
    // Adds new columns; fails if a column name already exists (replaceIfExist = false).
    @Override
    public Table addColumns(Expression... fields) {
        return addColumnsOperation(false, Arrays.asList(fields));
    }
    // Adds new columns, replacing existing ones of the same name (replaceIfExist = true).
    @Override
    public Table addOrReplaceColumns(Expression... fields) {
        return addColumnsOperation(true, Arrays.asList(fields));
    }
private Table addColumnsOperation(boolean replaceIfExist, List<Expression> fields) {
List<Expression> expressionsWithResolvedCalls = preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
List<Expression> aggNames = extracted.getAggregations();
if (!aggNames.isEmpty()) {
throw new ValidationException(
"The added field expression cannot be an aggregation, found: "
+ aggNames.get(0));
}
return createTable(
operationTreeBuilder.addColumns(
replaceIfExist, expressionsWithResolvedCalls, operationTree));
}
    /** Renames columns using alias expressions. */
    @Override
    public Table renameColumns(Expression... fields) {
        return createTable(
                operationTreeBuilder.renameColumns(Arrays.asList(fields), operationTree));
    }
    /** Drops the given columns. */
    @Override
    public Table dropColumns(Expression... fields) {
        return createTable(operationTreeBuilder.dropColumns(Arrays.asList(fields), operationTree));
    }
    /** Applies a scalar/map function to every row. */
    @Override
    public Table map(Expression mapFunction) {
        return createTable(operationTreeBuilder.map(mapFunction, operationTree));
    }
    /** Applies a table function to every row, flattening the results. */
    @Override
    public Table flatMap(Expression tableFunction) {
        return createTable(operationTreeBuilder.flatMap(tableFunction, operationTree));
    }
    // Global aggregate: delegates to groupBy() with no keys.
    @Override
    public AggregatedTable aggregate(Expression aggregateFunction) {
        return groupBy().aggregate(aggregateFunction);
    }
    // Global table aggregate: delegates to groupBy() with no keys.
    @Override
    public FlatAggregateTable flatAggregate(Expression tableAggregateFunction) {
        return groupBy().flatAggregate(tableAggregateFunction);
    }
@Override
public TablePipeline insertInto(String tablePath) {
return insertInto(tablePath, false);
}
@Override
public TablePipeline insertInto(String tablePath, boolean overwrite) {
UnresolvedIdentifier unresolvedIdentifier =
tableEnvironment.getParser().parseIdentifier(tablePath);
ObjectIdentifier objectIdentifier =
tableEnvironment.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
ContextResolvedTable contextResolvedTable =
tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier);
return insertInto(contextResolvedTable, overwrite);
}
    /** Inserts into an inline (anonymous) table described by {@code descriptor}. */
    @Override
    public TablePipeline insertInto(TableDescriptor descriptor) {
        return insertInto(descriptor, false);
    }
    @Override
    public TablePipeline insertInto(TableDescriptor descriptor, boolean overwrite) {
        // Derive the sink schema from this table's schema and the (optional) declared schema.
        final SchemaTranslator.ConsumingResult schemaTranslationResult =
                SchemaTranslator.createConsumingResult(
                        tableEnvironment.getCatalogManager().getDataTypeFactory(),
                        getResolvedSchema().toSourceRowDataType(),
                        descriptor.getSchema().orElse(null),
                        false);
        // Rebuild the descriptor with the derived schema before resolving it.
        final TableDescriptor updatedDescriptor =
                descriptor.toBuilder().schema(schemaTranslationResult.getSchema()).build();
        final ResolvedCatalogTable resolvedCatalogBaseTable =
                tableEnvironment
                        .getCatalogManager()
                        .resolveCatalogTable(updatedDescriptor.toCatalogTable());
        // The sink is anonymous: it is not registered in any catalog.
        return insertInto(ContextResolvedTable.anonymous(resolvedCatalogBaseTable), overwrite);
    }
@Override
public PartitionedTable partitionBy(Expression... fields) {
if (fields.length == 0) {
throw new ValidationException("Partition keys must not be empty.");
}
return new PartitionedTableImpl(this, Arrays.asList(fields));
}
    // Wraps this table as a named argument expression for process table functions.
    // createArgumentExpression is a helper defined elsewhere in this file.
    @Override
    public ApiExpression asArgument(String name) {
        return createArgumentExpression(operationTree, tableEnvironment, name);
    }
    // Applies the function registered under 'path' to this table plus extra arguments.
    // unionTableAndArguments (defined elsewhere in this file) presumably prepends this
    // table to the argument list — TODO confirm against its definition.
    @Override
    public Table process(String path, Object... arguments) {
        return tableEnvironment.fromCall(
                path, unionTableAndArguments(operationTree, tableEnvironment, arguments));
    }
    // Same as above, but for an inline (unregistered) function class.
    @Override
    public Table process(Class<? extends UserDefinedFunction> function, Object... arguments) {
        return tableEnvironment.fromCall(
                function, unionTableAndArguments(operationTree, tableEnvironment, arguments));
    }
    /**
     * Common sink-building step for all {@code insertInto} variants: wraps this table's
     * query operation in a {@link SinkModifyOperation} targeting the resolved table.
     */
    private TablePipeline insertInto(ContextResolvedTable contextResolvedTable, boolean overwrite) {
        return new TablePipelineImpl(
                tableEnvironment,
                new SinkModifyOperation(
                        contextResolvedTable,
                        getQueryOperation(),
                        Collections.emptyMap(), // no static partitions
                        null, // targetColumns
                        overwrite,
                        Collections.emptyMap())); // no dynamic options
    }
    /** Executes this table's query and returns the result. */
    @Override
    public TableResult execute() {
        return tableEnvironment.executeInternal(getQueryOperation());
    }
    /** Returns the plan explanation of this table's query in the requested format. */
    @Override
    public String explain(ExplainFormat format, ExplainDetail... extraDetails) {
        return tableEnvironment.explainInternal(
                Collections.singletonList(getQueryOperation()), format, extraDetails);
    }
    /**
     * Returns a stable name for this table. On first call it generates a unique name and,
     * as a side effect, registers this table as a temporary view in the environment.
     * Note: the lazy initialization is not synchronized; concurrent first calls could
     * register more than one view.
     */
    @Override
    public String toString() {
        if (tableName == null) {
            tableName = "UnnamedTable$" + uniqueId.getAndIncrement();
            tableEnvironment.createTemporaryView(tableName, this);
        }
        return tableName;
    }
// --------------------------------------------------------------------------------------------
// Grouped Table
// --------------------------------------------------------------------------------------------
    /** A table grouped by a fixed set of keys; a subsequent select/aggregate completes it. */
    private static final class GroupedTableImpl implements GroupedTable {
        private final TableImpl table;
        private final List<Expression> groupKeys;
        private GroupedTableImpl(TableImpl table, List<Expression> groupKeys) {
            this.table = table;
            this.groupKeys = groupKeys;
        }
        @Override
        public Table select(Expression... fields) {
            List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
            CategorizedExpressions extracted =
                    OperationExpressionsUtils.extractAggregationsAndProperties(
                            expressionsWithResolvedCalls);
            // Window properties require a windowed table; reject them here.
            if (!extracted.getWindowProperties().isEmpty()) {
                throw new ValidationException(
                        "Window properties can only be used on windowed tables.");
            }
            // Aggregate with the stored group keys, then project the remaining expressions.
            return table.createTable(
                    table.operationTreeBuilder.project(
                            extracted.getProjections(),
                            table.operationTreeBuilder.aggregate(
                                    groupKeys, extracted.getAggregations(), table.operationTree)));
        }
        @Override
        public AggregatedTable aggregate(Expression aggregateFunction) {
            return new AggregatedTableImpl(table, groupKeys, aggregateFunction);
        }
        @Override
        public FlatAggregateTable flatAggregate(Expression tableAggFunction) {
            return new FlatAggregateTableImpl(table, groupKeys, tableAggFunction);
        }
    }
// --------------------------------------------------------------------------------------------
// Aggregated Table
// --------------------------------------------------------------------------------------------
private static final class AggregatedTableImpl implements AggregatedTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final Expression aggregateFunction;
private AggregatedTableImpl(
TableImpl table, List<Expression> groupKeys, Expression aggregateFunction) {
this.table = table;
this.groupKeys = groupKeys;
this.aggregateFunction = aggregateFunction;
}
@Override
public Table select(Expression... fields) {
return table.createTable(
table.operationTreeBuilder.project(
Arrays.asList(fields),
table.operationTreeBuilder.aggregate(
groupKeys, aggregateFunction, table.operationTree)));
}
}
// --------------------------------------------------------------------------------------------
// Flat Aggregate Table
// --------------------------------------------------------------------------------------------
    /** Result of {@code groupBy(...).flatAggregate(f)}; the final select materializes it. */
    private static final class FlatAggregateTableImpl implements FlatAggregateTable {
        private final TableImpl table;
        private final List<Expression> groupKey;
        private final Expression tableAggregateFunction;
        private FlatAggregateTableImpl(
                TableImpl table, List<Expression> groupKey, Expression tableAggregateFunction) {
            this.table = table;
            this.groupKey = groupKey;
            this.tableAggregateFunction = tableAggregateFunction;
        }
        @Override
        public Table select(Expression... fields) {
            // Resolve the table aggregate function's calls, aggregate, then project.
            return table.createTable(
                    table.operationTreeBuilder.project(
                            Arrays.asList(fields),
                            table.operationTreeBuilder.tableAggregate(
                                    groupKey,
                                    tableAggregateFunction.accept(table.lookupResolver),
                                    table.operationTree)));
        }
    }
// --------------------------------------------------------------------------------------------
// Group Windowed Table
// --------------------------------------------------------------------------------------------
    /** A table with a declared group window; groupBy must reference the window alias. */
    private static final class GroupWindowedTableImpl implements GroupWindowedTable {
        private final TableImpl table;
        private final GroupWindow window;
        private GroupWindowedTableImpl(TableImpl table, GroupWindow window) {
            this.table = table;
            this.window = window;
        }
        @Override
        public WindowGroupedTable groupBy(Expression... fields) {
            // Strip the window alias from the group keys; it is handled separately.
            List<Expression> fieldsWithoutWindow =
                    table.preprocessExpressions(fields).stream()
                            .filter(f -> !window.getAlias().equals(f))
                            .collect(Collectors.toList());
            // Exactly one field must have been the window alias.
            if (fields.length != fieldsWithoutWindow.size() + 1) {
                throw new ValidationException("GroupBy must contain exactly one window alias.");
            }
            return new WindowGroupedTableImpl(table, fieldsWithoutWindow, window);
        }
    }
// --------------------------------------------------------------------------------------------
// Window Grouped Table
// --------------------------------------------------------------------------------------------
private static final class WindowGroupedTableImpl implements WindowGroupedTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final GroupWindow window;
private WindowGroupedTableImpl(
TableImpl table, List<Expression> groupKeys, GroupWindow window) {
this.table = table;
this.groupKeys = groupKeys;
this.window = window;
}
@Override
public Table select(Expression... fields) {
List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
return table.createTable(
table.operationTreeBuilder.project(
extracted.getProjections(),
table.operationTreeBuilder.windowAggregate(
groupKeys,
window,
extracted.getWindowProperties(),
extracted.getAggregations(),
table.operationTree),
// required for proper resolution of the time attribute in multi-windows
true));
}
@Override
public AggregatedTable aggregate(Expression aggregateFunction) {
return new WindowAggregatedTableImpl(table, groupKeys, aggregateFunction, window);
}
@Override
public FlatAggregateTable flatAggregate(Expression tableAggregateFunction) {
return new WindowFlatAggregateTableImpl(
table, groupKeys, tableAggregateFunction, window);
}
}
// --------------------------------------------------------------------------------------------
// Window Aggregated Table
// --------------------------------------------------------------------------------------------
private static final class WindowAggregatedTableImpl implements AggregatedTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final Expression aggregateFunction;
private final GroupWindow window;
private WindowAggregatedTableImpl(
TableImpl table,
List<Expression> groupKeys,
Expression aggregateFunction,
GroupWindow window) {
this.table = table;
this.groupKeys = groupKeys;
this.aggregateFunction = aggregateFunction;
this.window = window;
}
@Override
public Table select(Expression... fields) {
List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
if (!extracted.getAggregations().isEmpty()) {
throw new ValidationException(
"Aggregate functions cannot be used in the select right "
+ "after the aggregate.");
}
if (extracted.getProjections().stream()
.anyMatch(
p ->
(p instanceof UnresolvedReferenceExpression)
&& "*"
.equals(
((UnresolvedReferenceExpression) p)
.getName()))) {
throw new ValidationException("Can not use * for window aggregate!");
}
return table.createTable(
table.operationTreeBuilder.project(
extracted.getProjections(),
table.operationTreeBuilder.windowAggregate(
groupKeys,
window,
extracted.getWindowProperties(),
aggregateFunction,
table.operationTree)));
}
}
// --------------------------------------------------------------------------------------------
// Window Flat Aggregate Table
// --------------------------------------------------------------------------------------------
private static final class WindowFlatAggregateTableImpl implements FlatAggregateTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final Expression tableAggFunction;
private final GroupWindow window;
private WindowFlatAggregateTableImpl(
TableImpl table,
List<Expression> groupKeys,
Expression tableAggFunction,
GroupWindow window) {
this.table = table;
this.groupKeys = groupKeys;
this.tableAggFunction = tableAggFunction;
this.window = window;
}
@Override
public Table select(Expression... fields) {
List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
if (!extracted.getAggregations().isEmpty()) {
throw new ValidationException(
"Aggregate functions cannot be used in the select right "
+ "after the flatAggregate.");
}
if (extracted.getProjections().stream()
.anyMatch(
p ->
(p instanceof UnresolvedReferenceExpression)
&& "*"
.equals(
((UnresolvedReferenceExpression) p)
.getName()))) {
throw new ValidationException("Can not use * for window aggregate!");
}
return table.createTable(
table.operationTreeBuilder.project(
extracted.getProjections(),
table.operationTreeBuilder.windowTableAggregate(
groupKeys,
window,
extracted.getWindowProperties(),
tableAggFunction,
table.operationTree),
// required for proper resolution of the time attribute in multi-windows
true));
}
}
// --------------------------------------------------------------------------------------------
// Over Windowed Table
// --------------------------------------------------------------------------------------------
private static final class OverWindowedTableImpl implements OverWindowedTable {
private final TableImpl table;
private final List<OverWindow> overWindows;
private OverWindowedTableImpl(TableImpl table, List<OverWindow> overWindows) {
this.table = table;
this.overWindows = overWindows;
}
@Override
public Table select(Expression... fields) {
return table.createTable(
table.operationTreeBuilder.project(
Arrays.asList(fields), table.operationTree, overWindows));
}
}
// --------------------------------------------------------------------------------------------
// Partitioned Table
// --------------------------------------------------------------------------------------------
private static final class PartitionedTableImpl implements PartitionedTable {
private final TableImpl table;
private final List<Expression> partitionKeys;
private PartitionedTableImpl(TableImpl table, List<Expression> partitionKeys) {
this.table = table;
this.partitionKeys = partitionKeys;
}
@Override
public ApiExpression asArgument(String name) {
return createArgumentExpression(
createPartitionQueryOperation(), table.tableEnvironment, name);
}
@Override
public Table process(String path, Object... arguments) {
return table.tableEnvironment.fromCall(
path,
unionTableAndArguments(
createPartitionQueryOperation(), table.tableEnvironment, arguments));
}
@Override
public Table process(Class<? extends UserDefinedFunction> function, Object... arguments) {
return table.tableEnvironment.fromCall(
function,
unionTableAndArguments(
createPartitionQueryOperation(), table.tableEnvironment, arguments));
}
private QueryOperation createPartitionQueryOperation() {
return table.operationTreeBuilder.partition(partitionKeys, table.operationTree);
}
}
// --------------------------------------------------------------------------------------------
// Shared methods
// --------------------------------------------------------------------------------------------
    /** Wraps a new operation tree into a {@code Table} sharing this table's environment. */
    private TableImpl createTable(QueryOperation operation) {
        return new TableImpl(tableEnvironment, operation, operationTreeBuilder, lookupResolver);
    }
    /** List overload of {@link #preprocessExpressions(Expression[])}. */
    private List<Expression> preprocessExpressions(List<Expression> expressions) {
        return preprocessExpressions(expressions.toArray(new Expression[0]));
    }
private List<Expression> preprocessExpressions(Expression[] expressions) {
return Arrays.stream(expressions)
.map(f -> f.accept(lookupResolver))
.collect(Collectors.toList());
}
private static Object[] unionTableAndArguments(
QueryOperation queryOperation, TableEnvironment env, Object... arguments) {
return Stream.concat(
Stream.of(ApiExpressionUtils.tableRef("ptf_arg", queryOperation, env)),
Stream.of(arguments))
.toArray();
}
    /**
     * Builds a named PTF argument: an ASSIGNMENT call mapping {@code name} to a table
     * reference of the given query operation.
     */
    private static ApiExpression createArgumentExpression(
            QueryOperation queryOperation, TableEnvironment env, String name) {
        return new ApiExpression(
                ApiExpressionUtils.unresolvedCall(
                        BuiltInFunctionDefinitions.ASSIGNMENT,
                        lit(name),
                        ApiExpressionUtils.tableRef(name, queryOperation, env)));
    }
private void checkCommonTableEnvironment(Table right) {
if (((TableImpl) right).getTableEnvironment() != tableEnvironment) {
throw new ValidationException(
"Only tables from the same TableEnvironment can be joined.");
}
}
}
|
apache/hive | 35,917 | standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMNullableResourcePlan.java | /**
* Autogenerated by Thrift Compiler (0.16.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class WMNullableResourcePlan implements org.apache.thrift.TBase<WMNullableResourcePlan, WMNullableResourcePlan._Fields>, java.io.Serializable, Cloneable, Comparable<WMNullableResourcePlan> {
  // Wire-level descriptors for this struct and its fields. Note the field-id gap (2 -> 4):
  // id 3 is absent, presumably removed from the Thrift IDL — do not reuse it.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WMNullableResourcePlan");

  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField STATUS_FIELD_DESC = new org.apache.thrift.protocol.TField("status", org.apache.thrift.protocol.TType.I32, (short)2);
  private static final org.apache.thrift.protocol.TField QUERY_PARALLELISM_FIELD_DESC = new org.apache.thrift.protocol.TField("queryParallelism", org.apache.thrift.protocol.TType.I32, (short)4);
  private static final org.apache.thrift.protocol.TField IS_SET_QUERY_PARALLELISM_FIELD_DESC = new org.apache.thrift.protocol.TField("isSetQueryParallelism", org.apache.thrift.protocol.TType.BOOL, (short)5);
  private static final org.apache.thrift.protocol.TField DEFAULT_POOL_PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("defaultPoolPath", org.apache.thrift.protocol.TType.STRING, (short)6);
  private static final org.apache.thrift.protocol.TField IS_SET_DEFAULT_POOL_PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("isSetDefaultPoolPath", org.apache.thrift.protocol.TType.BOOL, (short)7);
  private static final org.apache.thrift.protocol.TField NS_FIELD_DESC = new org.apache.thrift.protocol.TField("ns", org.apache.thrift.protocol.TType.STRING, (short)8);

  // Serialization strategies: "standard" (field-tagged) and the more compact "tuple" scheme.
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new WMNullableResourcePlanStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new WMNullableResourcePlanTupleSchemeFactory();

  // All fields are optional. The isSetQueryParallelism/isSetDefaultPoolPath booleans are
  // themselves wire fields (ids 5 and 7): NOTE(review) they appear intended to let a caller
  // distinguish "leave unchanged" from "explicitly clear" in nullable updates — verify
  // against the originating .thrift IDL.
  private @org.apache.thrift.annotation.Nullable java.lang.String name; // optional
  private @org.apache.thrift.annotation.Nullable WMResourcePlanStatus status; // optional
  private int queryParallelism; // optional
  private boolean isSetQueryParallelism; // optional
  private @org.apache.thrift.annotation.Nullable java.lang.String defaultPoolPath; // optional
  private boolean isSetDefaultPoolPath; // optional
  private @org.apache.thrift.annotation.Nullable java.lang.String ns; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    NAME((short)1, "name"),
    /**
     *
     * @see WMResourcePlanStatus
     */
    STATUS((short)2, "status"),
    // Field id 3 is intentionally absent (see the field descriptors above).
    QUERY_PARALLELISM((short)4, "queryParallelism"),
    IS_SET_QUERY_PARALLELISM((short)5, "isSetQueryParallelism"),
    DEFAULT_POOL_PATH((short)6, "defaultPoolPath"),
    IS_SET_DEFAULT_POOL_PATH((short)7, "isSetDefaultPoolPath"),
    NS((short)8, "ns");

    // Lookup table from the IDL field name to the enum constant, built once at class load.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // NAME
          return NAME;
        case 2: // STATUS
          return STATUS;
        case 4: // QUERY_PARALLELISM
          return QUERY_PARALLELISM;
        case 5: // IS_SET_QUERY_PARALLELISM
          return IS_SET_QUERY_PARALLELISM;
        case 6: // DEFAULT_POOL_PATH
          return DEFAULT_POOL_PATH;
        case 7: // IS_SET_DEFAULT_POOL_PATH
          return IS_SET_DEFAULT_POOL_PATH;
        case 8: // NS
          return NS;
        default:
          // Unknown ids (including the retired id 3) yield null so readers can skip them.
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Bit positions inside __isset_bitfield tracking presence of the primitive-typed fields
  // (object fields use null as their "unset" marker instead).
  private static final int __QUERYPARALLELISM_ISSET_ID = 0;
  private static final int __ISSETQUERYPARALLELISM_ISSET_ID = 1;
  private static final int __ISSETDEFAULTPOOLPATH_ISSET_ID = 2;
  private byte __isset_bitfield = 0;
  private static final _Fields optionals[] = {_Fields.NAME,_Fields.STATUS,_Fields.QUERY_PARALLELISM,_Fields.IS_SET_QUERY_PARALLELISM,_Fields.DEFAULT_POOL_PATH,_Fields.IS_SET_DEFAULT_POOL_PATH,_Fields.NS};
  // Reflection-style metadata describing every field; registered globally with the Thrift
  // runtime so generic tooling can introspect this struct.
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.STATUS, new org.apache.thrift.meta_data.FieldMetaData("status", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, WMResourcePlanStatus.class)));
    tmpMap.put(_Fields.QUERY_PARALLELISM, new org.apache.thrift.meta_data.FieldMetaData("queryParallelism", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
    tmpMap.put(_Fields.IS_SET_QUERY_PARALLELISM, new org.apache.thrift.meta_data.FieldMetaData("isSetQueryParallelism", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.DEFAULT_POOL_PATH, new org.apache.thrift.meta_data.FieldMetaData("defaultPoolPath", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.IS_SET_DEFAULT_POOL_PATH, new org.apache.thrift.meta_data.FieldMetaData("isSetDefaultPoolPath", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.NS, new org.apache.thrift.meta_data.FieldMetaData("ns", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(WMNullableResourcePlan.class, metaDataMap);
  }
  /** Default constructor: all fields start unset. */
  public WMNullableResourcePlan() {
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public WMNullableResourcePlan(WMNullableResourcePlan other) {
    // Strings and the status enum are immutable, so reference copies are sufficient here.
    __isset_bitfield = other.__isset_bitfield;
    if (other.isSetName()) {
      this.name = other.name;
    }
    if (other.isSetStatus()) {
      this.status = other.status;
    }
    this.queryParallelism = other.queryParallelism;
    this.isSetQueryParallelism = other.isSetQueryParallelism;
    if (other.isSetDefaultPoolPath()) {
      this.defaultPoolPath = other.defaultPoolPath;
    }
    this.isSetDefaultPoolPath = other.isSetDefaultPoolPath;
    if (other.isSetNs()) {
      this.ns = other.ns;
    }
  }

  /** Copy-constructor-based clone; preferred over {@code clone()} in Thrift-generated code. */
  public WMNullableResourcePlan deepCopy() {
    return new WMNullableResourcePlan(this);
  }
  /** Resets every field to its unset/default state (used when objects are pooled/reused). */
  @Override
  public void clear() {
    this.name = null;
    this.status = null;
    setQueryParallelismIsSet(false);
    this.queryParallelism = 0;
    setIsSetQueryParallelismIsSet(false);
    this.isSetQueryParallelism = false;
    this.defaultPoolPath = null;
    setIsSetDefaultPoolPathIsSet(false);
    this.isSetDefaultPoolPath = false;
    this.ns = null;
  }
  // --- Accessors for field 1: name (optional string). Unset is represented by null. ---
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getName() {
    return this.name;
  }

  public void setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {
    this.name = name;
  }

  public void unsetName() {
    this.name = null;
  }

  /** Returns true if field name is set (has been assigned a value) and false otherwise */
  public boolean isSetName() {
    return this.name != null;
  }

  public void setNameIsSet(boolean value) {
    // Object field: "unset" simply nulls the reference; setting true is a no-op.
    if (!value) {
      this.name = null;
    }
  }
  // --- Accessors for field 2: status (optional enum). Unset is represented by null. ---
  /**
   *
   * @see WMResourcePlanStatus
   */
  @org.apache.thrift.annotation.Nullable
  public WMResourcePlanStatus getStatus() {
    return this.status;
  }

  /**
   *
   * @see WMResourcePlanStatus
   */
  public void setStatus(@org.apache.thrift.annotation.Nullable WMResourcePlanStatus status) {
    this.status = status;
  }

  public void unsetStatus() {
    this.status = null;
  }

  /** Returns true if field status is set (has been assigned a value) and false otherwise */
  public boolean isSetStatus() {
    return this.status != null;
  }

  public void setStatusIsSet(boolean value) {
    // Object field: "unset" simply nulls the reference; setting true is a no-op.
    if (!value) {
      this.status = null;
    }
  }
  // --- Accessors for field 4: queryParallelism (optional i32) and field 5:
  // isSetQueryParallelism (optional bool). Presence of both primitives is tracked in
  // __isset_bitfield, not by null.
  public int getQueryParallelism() {
    return this.queryParallelism;
  }

  public void setQueryParallelism(int queryParallelism) {
    this.queryParallelism = queryParallelism;
    setQueryParallelismIsSet(true);
  }

  public void unsetQueryParallelism() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __QUERYPARALLELISM_ISSET_ID);
  }

  /** Returns true if field queryParallelism is set (has been assigned a value) and false otherwise */
  public boolean isSetQueryParallelism() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __QUERYPARALLELISM_ISSET_ID);
  }

  public void setQueryParallelismIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __QUERYPARALLELISM_ISSET_ID, value);
  }

  // Note the naming collision: isIsSetQueryParallelism() reads the *wire field* (id 5),
  // while isSetIsSetQueryParallelism() reports whether that wire field itself was assigned.
  public boolean isIsSetQueryParallelism() {
    return this.isSetQueryParallelism;
  }

  public void setIsSetQueryParallelism(boolean isSetQueryParallelism) {
    this.isSetQueryParallelism = isSetQueryParallelism;
    setIsSetQueryParallelismIsSet(true);
  }

  public void unsetIsSetQueryParallelism() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ISSETQUERYPARALLELISM_ISSET_ID);
  }

  /** Returns true if field isSetQueryParallelism is set (has been assigned a value) and false otherwise */
  public boolean isSetIsSetQueryParallelism() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ISSETQUERYPARALLELISM_ISSET_ID);
  }

  public void setIsSetQueryParallelismIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ISSETQUERYPARALLELISM_ISSET_ID, value);
  }
  // --- Accessors for field 6: defaultPoolPath (optional string) and field 7:
  // isSetDefaultPoolPath (optional bool). The string's presence is null-based; the boolean's
  // presence is tracked in __isset_bitfield.
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getDefaultPoolPath() {
    return this.defaultPoolPath;
  }

  public void setDefaultPoolPath(@org.apache.thrift.annotation.Nullable java.lang.String defaultPoolPath) {
    this.defaultPoolPath = defaultPoolPath;
  }

  public void unsetDefaultPoolPath() {
    this.defaultPoolPath = null;
  }

  /** Returns true if field defaultPoolPath is set (has been assigned a value) and false otherwise */
  public boolean isSetDefaultPoolPath() {
    return this.defaultPoolPath != null;
  }

  public void setDefaultPoolPathIsSet(boolean value) {
    // Object field: "unset" simply nulls the reference; setting true is a no-op.
    if (!value) {
      this.defaultPoolPath = null;
    }
  }

  // isIsSetDefaultPoolPath() reads the wire field (id 7); isSetIsSetDefaultPoolPath()
  // reports whether that wire field itself was assigned.
  public boolean isIsSetDefaultPoolPath() {
    return this.isSetDefaultPoolPath;
  }

  public void setIsSetDefaultPoolPath(boolean isSetDefaultPoolPath) {
    this.isSetDefaultPoolPath = isSetDefaultPoolPath;
    setIsSetDefaultPoolPathIsSet(true);
  }

  public void unsetIsSetDefaultPoolPath() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ISSETDEFAULTPOOLPATH_ISSET_ID);
  }

  /** Returns true if field isSetDefaultPoolPath is set (has been assigned a value) and false otherwise */
  public boolean isSetIsSetDefaultPoolPath() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ISSETDEFAULTPOOLPATH_ISSET_ID);
  }

  public void setIsSetDefaultPoolPathIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ISSETDEFAULTPOOLPATH_ISSET_ID, value);
  }
  // --- Accessors for field 8: ns (optional string, resource-plan namespace per the field
  // name — TODO confirm against the IDL). Unset is represented by null. ---
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getNs() {
    return this.ns;
  }

  public void setNs(@org.apache.thrift.annotation.Nullable java.lang.String ns) {
    this.ns = ns;
  }

  public void unsetNs() {
    this.ns = null;
  }

  /** Returns true if field ns is set (has been assigned a value) and false otherwise */
  public boolean isSetNs() {
    return this.ns != null;
  }

  public void setNsIsSet(boolean value) {
    // Object field: "unset" simply nulls the reference; setting true is a no-op.
    if (!value) {
      this.ns = null;
    }
  }
  /**
   * Generic, reflection-style setter used by Thrift tooling: assigns {@code value} to the
   * given field, or unsets the field when {@code value} is null. Primitive fields expect
   * their boxed types and will NPE-free unset on null instead.
   */
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case NAME:
      if (value == null) {
        unsetName();
      } else {
        setName((java.lang.String)value);
      }
      break;

    case STATUS:
      if (value == null) {
        unsetStatus();
      } else {
        setStatus((WMResourcePlanStatus)value);
      }
      break;

    case QUERY_PARALLELISM:
      if (value == null) {
        unsetQueryParallelism();
      } else {
        setQueryParallelism((java.lang.Integer)value);
      }
      break;

    case IS_SET_QUERY_PARALLELISM:
      if (value == null) {
        unsetIsSetQueryParallelism();
      } else {
        setIsSetQueryParallelism((java.lang.Boolean)value);
      }
      break;

    case DEFAULT_POOL_PATH:
      if (value == null) {
        unsetDefaultPoolPath();
      } else {
        setDefaultPoolPath((java.lang.String)value);
      }
      break;

    case IS_SET_DEFAULT_POOL_PATH:
      if (value == null) {
        unsetIsSetDefaultPoolPath();
      } else {
        setIsSetDefaultPoolPath((java.lang.Boolean)value);
      }
      break;

    case NS:
      if (value == null) {
        unsetNs();
      } else {
        setNs((java.lang.String)value);
      }
      break;

    }
  }
  /**
   * Generic, reflection-style getter used by Thrift tooling. Primitive fields are returned
   * autoboxed; an unrecognized field constant is a programming error.
   */
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case NAME:
      return getName();

    case STATUS:
      return getStatus();

    case QUERY_PARALLELISM:
      return getQueryParallelism();

    case IS_SET_QUERY_PARALLELISM:
      return isIsSetQueryParallelism();

    case DEFAULT_POOL_PATH:
      return getDefaultPoolPath();

    case IS_SET_DEFAULT_POOL_PATH:
      return isIsSetDefaultPoolPath();

    case NS:
      return getNs();

    }
    throw new java.lang.IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
    case NAME:
      return isSetName();
    case STATUS:
      return isSetStatus();
    case QUERY_PARALLELISM:
      return isSetQueryParallelism();
    case IS_SET_QUERY_PARALLELISM:
      return isSetIsSetQueryParallelism();
    case DEFAULT_POOL_PATH:
      return isSetDefaultPoolPath();
    case IS_SET_DEFAULT_POOL_PATH:
      return isSetIsSetDefaultPoolPath();
    case NS:
      return isSetNs();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof WMNullableResourcePlan)
      return this.equals((WMNullableResourcePlan)that);
    return false;
  }

  /**
   * Field-by-field equality: for each field, both sides must agree on presence, and when
   * present the values must match. Kept in sync with {@link #hashCode()} by the generator.
   */
  public boolean equals(WMNullableResourcePlan that) {
    if (that == null)
      return false;
    if (this == that)
      return true;

    boolean this_present_name = true && this.isSetName();
    boolean that_present_name = true && that.isSetName();
    if (this_present_name || that_present_name) {
      if (!(this_present_name && that_present_name))
        return false;
      if (!this.name.equals(that.name))
        return false;
    }

    boolean this_present_status = true && this.isSetStatus();
    boolean that_present_status = true && that.isSetStatus();
    if (this_present_status || that_present_status) {
      if (!(this_present_status && that_present_status))
        return false;
      if (!this.status.equals(that.status))
        return false;
    }

    boolean this_present_queryParallelism = true && this.isSetQueryParallelism();
    boolean that_present_queryParallelism = true && that.isSetQueryParallelism();
    if (this_present_queryParallelism || that_present_queryParallelism) {
      if (!(this_present_queryParallelism && that_present_queryParallelism))
        return false;
      if (this.queryParallelism != that.queryParallelism)
        return false;
    }

    boolean this_present_isSetQueryParallelism = true && this.isSetIsSetQueryParallelism();
    boolean that_present_isSetQueryParallelism = true && that.isSetIsSetQueryParallelism();
    if (this_present_isSetQueryParallelism || that_present_isSetQueryParallelism) {
      if (!(this_present_isSetQueryParallelism && that_present_isSetQueryParallelism))
        return false;
      if (this.isSetQueryParallelism != that.isSetQueryParallelism)
        return false;
    }

    boolean this_present_defaultPoolPath = true && this.isSetDefaultPoolPath();
    boolean that_present_defaultPoolPath = true && that.isSetDefaultPoolPath();
    if (this_present_defaultPoolPath || that_present_defaultPoolPath) {
      if (!(this_present_defaultPoolPath && that_present_defaultPoolPath))
        return false;
      if (!this.defaultPoolPath.equals(that.defaultPoolPath))
        return false;
    }

    boolean this_present_isSetDefaultPoolPath = true && this.isSetIsSetDefaultPoolPath();
    boolean that_present_isSetDefaultPoolPath = true && that.isSetIsSetDefaultPoolPath();
    if (this_present_isSetDefaultPoolPath || that_present_isSetDefaultPoolPath) {
      if (!(this_present_isSetDefaultPoolPath && that_present_isSetDefaultPoolPath))
        return false;
      if (this.isSetDefaultPoolPath != that.isSetDefaultPoolPath)
        return false;
    }

    boolean this_present_ns = true && this.isSetNs();
    boolean that_present_ns = true && that.isSetNs();
    if (this_present_ns || that_present_ns) {
      if (!(this_present_ns && that_present_ns))
        return false;
      if (!this.ns.equals(that.ns))
        return false;
    }

    return true;
  }
  /**
   * Hash over presence flags and present values, consistent with {@link #equals}. The
   * 8191/131071/524287 multipliers are the Thrift generator's fixed Mersenne-prime scheme.
   */
  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);
    if (isSetName())
      hashCode = hashCode * 8191 + name.hashCode();

    hashCode = hashCode * 8191 + ((isSetStatus()) ? 131071 : 524287);
    if (isSetStatus())
      hashCode = hashCode * 8191 + status.getValue();

    hashCode = hashCode * 8191 + ((isSetQueryParallelism()) ? 131071 : 524287);
    if (isSetQueryParallelism())
      hashCode = hashCode * 8191 + queryParallelism;

    hashCode = hashCode * 8191 + ((isSetIsSetQueryParallelism()) ? 131071 : 524287);
    if (isSetIsSetQueryParallelism())
      hashCode = hashCode * 8191 + ((isSetQueryParallelism) ? 131071 : 524287);

    hashCode = hashCode * 8191 + ((isSetDefaultPoolPath()) ? 131071 : 524287);
    if (isSetDefaultPoolPath())
      hashCode = hashCode * 8191 + defaultPoolPath.hashCode();

    hashCode = hashCode * 8191 + ((isSetIsSetDefaultPoolPath()) ? 131071 : 524287);
    if (isSetIsSetDefaultPoolPath())
      hashCode = hashCode * 8191 + ((isSetDefaultPoolPath) ? 131071 : 524287);

    hashCode = hashCode * 8191 + ((isSetNs()) ? 131071 : 524287);
    if (isSetNs())
      hashCode = hashCode * 8191 + ns.hashCode();

    return hashCode;
  }
  /**
   * Total order over structs: fields are compared in declaration order; for each field,
   * presence is compared first (unset sorts before set), then the values themselves.
   */
  @Override
  public int compareTo(WMNullableResourcePlan other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = java.lang.Boolean.compare(isSetName(), other.isSetName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetStatus(), other.isSetStatus());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetStatus()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.status, other.status);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetQueryParallelism(), other.isSetQueryParallelism());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetQueryParallelism()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.queryParallelism, other.queryParallelism);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetIsSetQueryParallelism(), other.isSetIsSetQueryParallelism());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetIsSetQueryParallelism()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.isSetQueryParallelism, other.isSetQueryParallelism);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetDefaultPoolPath(), other.isSetDefaultPoolPath());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetDefaultPoolPath()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.defaultPoolPath, other.defaultPoolPath);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetIsSetDefaultPoolPath(), other.isSetIsSetDefaultPoolPath());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetIsSetDefaultPoolPath()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.isSetDefaultPoolPath, other.isSetDefaultPoolPath);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetNs(), other.isSetNs());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetNs()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ns, other.ns);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Maps a wire field id to its {@code _Fields} constant; null for unknown ids. */
  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Deserialization/serialization entry points; the scheme (standard vs tuple) is chosen
  // from the protocol by the generated scheme() helper.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  /** Debug representation listing only the fields that are currently set. */
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("WMNullableResourcePlan(");
    boolean first = true;

    if (isSetName()) {
      sb.append("name:");
      if (this.name == null) {
        sb.append("null");
      } else {
        sb.append(this.name);
      }
      first = false;
    }
    if (isSetStatus()) {
      if (!first) sb.append(", ");
      sb.append("status:");
      if (this.status == null) {
        sb.append("null");
      } else {
        sb.append(this.status);
      }
      first = false;
    }
    if (isSetQueryParallelism()) {
      if (!first) sb.append(", ");
      sb.append("queryParallelism:");
      sb.append(this.queryParallelism);
      first = false;
    }
    if (isSetIsSetQueryParallelism()) {
      if (!first) sb.append(", ");
      sb.append("isSetQueryParallelism:");
      sb.append(this.isSetQueryParallelism);
      first = false;
    }
    if (isSetDefaultPoolPath()) {
      if (!first) sb.append(", ");
      sb.append("defaultPoolPath:");
      if (this.defaultPoolPath == null) {
        sb.append("null");
      } else {
        sb.append(this.defaultPoolPath);
      }
      first = false;
    }
    if (isSetIsSetDefaultPoolPath()) {
      if (!first) sb.append(", ");
      sb.append("isSetDefaultPoolPath:");
      sb.append(this.isSetDefaultPoolPath);
      first = false;
    }
    if (isSetNs()) {
      if (!first) sb.append(", ");
      sb.append("ns:");
      if (this.ns == null) {
        sb.append("null");
      } else {
        sb.append(this.ns);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
  /** Java-serialization hook: restores this struct from Thrift compact-protocol bytes in the object stream. */
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      // rewrap so the signature stays pure java.io
      throw new java.io.IOException(te);
    }
  }
  /** Factory handed to the scheme cache; produces the field-by-field (standard) scheme. */
  private static class WMNullableResourcePlanStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public WMNullableResourcePlanStandardScheme getScheme() {
      return new WMNullableResourcePlanStandardScheme();
    }
  }
  /**
   * Standard (field-id tagged) protocol scheme. Reads are tolerant: any
   * unknown field id or type mismatch is skipped rather than failing, which
   * preserves forward compatibility. Writes emit only fields that are set.
   * NOTE(review): field id 3 is absent from the switch — presumably removed
   * or reserved in the IDL; confirm against the .thrift definition.
   */
  private static class WMNullableResourcePlanStandardScheme extends org.apache.thrift.scheme.StandardScheme<WMNullableResourcePlan> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, WMNullableResourcePlan struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break; // end-of-struct marker
        }
        switch (schemeField.id) {
          case 1: // NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.name = iprot.readString();
              struct.setNameIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // STATUS
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              // enum transmitted as i32; findByValue returns null for unknown values
              struct.status = org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus.findByValue(iprot.readI32());
              struct.setStatusIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // QUERY_PARALLELISM
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.queryParallelism = iprot.readI32();
              struct.setQueryParallelismIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5: // IS_SET_QUERY_PARALLELISM
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.isSetQueryParallelism = iprot.readBool();
              struct.setIsSetQueryParallelismIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 6: // DEFAULT_POOL_PATH
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.defaultPoolPath = iprot.readString();
              struct.setDefaultPoolPathIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 7: // IS_SET_DEFAULT_POOL_PATH
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.isSetDefaultPoolPath = iprot.readBool();
              struct.setIsSetDefaultPoolPathIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 8: // NS
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.ns = iprot.readString();
              struct.setNsIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            // unknown field id: skip for forward compatibility
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate(); // no-op today; kept for generated-code symmetry
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, WMNullableResourcePlan struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // reference fields are guarded by both null and isSet checks
      if (struct.name != null) {
        if (struct.isSetName()) {
          oprot.writeFieldBegin(NAME_FIELD_DESC);
          oprot.writeString(struct.name);
          oprot.writeFieldEnd();
        }
      }
      if (struct.status != null) {
        if (struct.isSetStatus()) {
          oprot.writeFieldBegin(STATUS_FIELD_DESC);
          oprot.writeI32(struct.status.getValue());
          oprot.writeFieldEnd();
        }
      }
      if (struct.isSetQueryParallelism()) {
        oprot.writeFieldBegin(QUERY_PARALLELISM_FIELD_DESC);
        oprot.writeI32(struct.queryParallelism);
        oprot.writeFieldEnd();
      }
      if (struct.isSetIsSetQueryParallelism()) {
        oprot.writeFieldBegin(IS_SET_QUERY_PARALLELISM_FIELD_DESC);
        oprot.writeBool(struct.isSetQueryParallelism);
        oprot.writeFieldEnd();
      }
      if (struct.defaultPoolPath != null) {
        if (struct.isSetDefaultPoolPath()) {
          oprot.writeFieldBegin(DEFAULT_POOL_PATH_FIELD_DESC);
          oprot.writeString(struct.defaultPoolPath);
          oprot.writeFieldEnd();
        }
      }
      if (struct.isSetIsSetDefaultPoolPath()) {
        oprot.writeFieldBegin(IS_SET_DEFAULT_POOL_PATH_FIELD_DESC);
        oprot.writeBool(struct.isSetDefaultPoolPath);
        oprot.writeFieldEnd();
      }
      if (struct.ns != null) {
        if (struct.isSetNs()) {
          oprot.writeFieldBegin(NS_FIELD_DESC);
          oprot.writeString(struct.ns);
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  /** Factory handed to the scheme cache; produces the compact bitset-based (tuple) scheme. */
  private static class WMNullableResourcePlanTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public WMNullableResourcePlanTupleScheme getScheme() {
      return new WMNullableResourcePlanTupleScheme();
    }
  }
  /**
   * Tuple protocol scheme: a leading 7-bit presence bitset followed by the
   * set fields' values in declaration order. Bit positions (0..6) are a wire
   * contract between write and read and must never be reordered.
   */
  private static class WMNullableResourcePlanTupleScheme extends org.apache.thrift.scheme.TupleScheme<WMNullableResourcePlan> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, WMNullableResourcePlan struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetName()) {
        optionals.set(0);
      }
      if (struct.isSetStatus()) {
        optionals.set(1);
      }
      if (struct.isSetQueryParallelism()) {
        optionals.set(2);
      }
      if (struct.isSetIsSetQueryParallelism()) {
        optionals.set(3);
      }
      if (struct.isSetDefaultPoolPath()) {
        optionals.set(4);
      }
      if (struct.isSetIsSetDefaultPoolPath()) {
        optionals.set(5);
      }
      if (struct.isSetNs()) {
        optionals.set(6);
      }
      oprot.writeBitSet(optionals, 7);
      // values follow in the same order as the bits above
      if (struct.isSetName()) {
        oprot.writeString(struct.name);
      }
      if (struct.isSetStatus()) {
        oprot.writeI32(struct.status.getValue());
      }
      if (struct.isSetQueryParallelism()) {
        oprot.writeI32(struct.queryParallelism);
      }
      if (struct.isSetIsSetQueryParallelism()) {
        oprot.writeBool(struct.isSetQueryParallelism);
      }
      if (struct.isSetDefaultPoolPath()) {
        oprot.writeString(struct.defaultPoolPath);
      }
      if (struct.isSetIsSetDefaultPoolPath()) {
        oprot.writeBool(struct.isSetDefaultPoolPath);
      }
      if (struct.isSetNs()) {
        oprot.writeString(struct.ns);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, WMNullableResourcePlan struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(7);
      if (incoming.get(0)) {
        struct.name = iprot.readString();
        struct.setNameIsSet(true);
      }
      if (incoming.get(1)) {
        // enum transmitted as i32; findByValue returns null for unknown values
        struct.status = org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus.findByValue(iprot.readI32());
        struct.setStatusIsSet(true);
      }
      if (incoming.get(2)) {
        struct.queryParallelism = iprot.readI32();
        struct.setQueryParallelismIsSet(true);
      }
      if (incoming.get(3)) {
        struct.isSetQueryParallelism = iprot.readBool();
        struct.setIsSetQueryParallelismIsSet(true);
      }
      if (incoming.get(4)) {
        struct.defaultPoolPath = iprot.readString();
        struct.setDefaultPoolPathIsSet(true);
      }
      if (incoming.get(5)) {
        struct.isSetDefaultPoolPath = iprot.readBool();
        struct.setIsSetDefaultPoolPathIsSet(true);
      }
      if (incoming.get(6)) {
        struct.ns = iprot.readString();
        struct.setNsIsSet(true);
      }
    }
  }
  /** Selects the standard or tuple scheme factory to match the protocol's declared scheme. */
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/annotations.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Message of essential metadata of App Platform.
* This message is usually attached to a certain processor output annotation for
* customer to identify the source of the data.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.AppPlatformMetadata}
*/
public final class AppPlatformMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.AppPlatformMetadata)
AppPlatformMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use AppPlatformMetadata.newBuilder() to construct.
  /** Constructs from a populated builder; reached only via {@link #newBuilder()}. */
  private AppPlatformMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  /** Default-instance constructor: every string field starts empty (proto3 default). */
  private AppPlatformMetadata() {
    application_ = "";
    instanceId_ = "";
    node_ = "";
    processor_ = "";
  }
  /** Runtime hook used by the protobuf library to create fresh instances reflectively. */
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AppPlatformMetadata();
  }
  /** Returns the proto descriptor for {@code google.cloud.visionai.v1.AppPlatformMetadata}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.visionai.v1.AnnotationsProto
        .internal_static_google_cloud_visionai_v1_AppPlatformMetadata_descriptor;
  }
  /** Wires the generated field-accessor table to this message and its Builder for reflective access. */
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.visionai.v1.AnnotationsProto
        .internal_static_google_cloud_visionai_v1_AppPlatformMetadata_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.visionai.v1.AppPlatformMetadata.class,
            com.google.cloud.visionai.v1.AppPlatformMetadata.Builder.class);
  }
  public static final int APPLICATION_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  // Holds either a java.lang.String or a ByteString; UTF-8 decoding is done
  // lazily on first String access and the decoded value is memoized.
  private volatile java.lang.Object application_ = "";

  /**
   * The application resource name.
   *
   * <code>string application = 1;</code>
   *
   * @return The application.
   */
  @java.lang.Override
  public java.lang.String getApplication() {
    java.lang.Object ref = application_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // first String access: decode the cached bytes once and memoize
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      application_ = s;
      return s;
    }
  }

  /**
   * The application resource name.
   *
   * <code>string application = 1;</code>
   *
   * @return The bytes for application.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getApplicationBytes() {
    java.lang.Object ref = application_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      application_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int INSTANCE_ID_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  // String-or-ByteString holder; see application_ for the lazy-decode scheme.
  private volatile java.lang.Object instanceId_ = "";

  /**
   * The instance resource id. Instance is the nested resource of application
   * under collection 'instances'.
   *
   * <code>string instance_id = 2;</code>
   *
   * @return The instanceId.
   */
  @java.lang.Override
  public java.lang.String getInstanceId() {
    java.lang.Object ref = instanceId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      instanceId_ = s;
      return s;
    }
  }

  /**
   * The instance resource id. Instance is the nested resource of application
   * under collection 'instances'.
   *
   * <code>string instance_id = 2;</code>
   *
   * @return The bytes for instanceId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getInstanceIdBytes() {
    java.lang.Object ref = instanceId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      instanceId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int NODE_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  // String-or-ByteString holder; see application_ for the lazy-decode scheme.
  private volatile java.lang.Object node_ = "";

  /**
   * The node name of the application graph.
   *
   * <code>string node = 3;</code>
   *
   * @return The node.
   */
  @java.lang.Override
  public java.lang.String getNode() {
    java.lang.Object ref = node_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      node_ = s;
      return s;
    }
  }

  /**
   * The node name of the application graph.
   *
   * <code>string node = 3;</code>
   *
   * @return The bytes for node.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNodeBytes() {
    java.lang.Object ref = node_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      node_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PROCESSOR_FIELD_NUMBER = 4;

  @SuppressWarnings("serial")
  // String-or-ByteString holder; see application_ for the lazy-decode scheme.
  private volatile java.lang.Object processor_ = "";

  /**
   * The referred processor resource name of the application node.
   *
   * <code>string processor = 4;</code>
   *
   * @return The processor.
   */
  @java.lang.Override
  public java.lang.String getProcessor() {
    java.lang.Object ref = processor_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      processor_ = s;
      return s;
    }
  }

  /**
   * The referred processor resource name of the application node.
   *
   * <code>string processor = 4;</code>
   *
   * @return The bytes for processor.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProcessorBytes() {
    java.lang.Object ref = processor_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      processor_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  /** Serializes set (non-empty) string fields in field-number order, then any unknown fields. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 scalar strings are omitted from the wire when empty
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(application_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, application_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(node_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, node_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(processor_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, processor_);
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes (and memoizes) the serialized byte size; mirrors writeTo's field selection exactly. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize; // -1 means not yet computed
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(application_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, application_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(node_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, node_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(processor_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, processor_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.visionai.v1.AppPlatformMetadata)) {
return super.equals(obj);
}
com.google.cloud.visionai.v1.AppPlatformMetadata other =
(com.google.cloud.visionai.v1.AppPlatformMetadata) obj;
if (!getApplication().equals(other.getApplication())) return false;
if (!getInstanceId().equals(other.getInstanceId())) return false;
if (!getNode().equals(other.getNode())) return false;
if (!getProcessor().equals(other.getProcessor())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  /**
   * Memoized hash over descriptor, all four fields, and unknown fields.
   * The 19/37/53/29 multiplier sequence is the protobuf-generated convention
   * and must not change: serialized caches may depend on stable hashes.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + APPLICATION_FIELD_NUMBER;
    hash = (53 * hash) + getApplication().hashCode();
    hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getInstanceId().hashCode();
    hash = (37 * hash) + NODE_FIELD_NUMBER;
    hash = (53 * hash) + getNode().hashCode();
    hash = (37 * hash) + PROCESSOR_FIELD_NUMBER;
    hash = (53 * hash) + getProcessor().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------
  // Static parse entry points. The ByteBuffer/ByteString/byte[] overloads
  // parse a complete message; the stream overloads read to EOF, and the
  // *Delimited* overloads expect a varint length prefix before the message.
  // ---------------------------------------------------------------------
  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.AppPlatformMetadata parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns an empty builder. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.cloud.visionai.v1.AppPlatformMetadata prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // the default instance yields a fresh builder; anything else seeds from this
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Message of essential metadata of App Platform.
* This message is usually attached to a certain processor output annotation for
* customer to identify the source of the data.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.AppPlatformMetadata}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.AppPlatformMetadata)
com.google.cloud.visionai.v1.AppPlatformMetadataOrBuilder {
    /** Returns the proto descriptor for the message this builder builds. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.visionai.v1.AnnotationsProto
          .internal_static_google_cloud_visionai_v1_AppPlatformMetadata_descriptor;
    }
    /** Same accessor table as the message class; required by the reflective builder API. */
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.visionai.v1.AnnotationsProto
          .internal_static_google_cloud_visionai_v1_AppPlatformMetadata_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.visionai.v1.AppPlatformMetadata.class,
              com.google.cloud.visionai.v1.AppPlatformMetadata.Builder.class);
    }
    // Construct using com.google.cloud.visionai.v1.AppPlatformMetadata.newBuilder()
    private Builder() {}

    /** Parent-tracking constructor used when this builder is nested inside another builder. */
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets every field to its proto3 default and clears all presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      application_ = "";
      instanceId_ = "";
      node_ = "";
      processor_ = "";
      return this;
    }
    /** Descriptor of the message type under construction. */
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.visionai.v1.AnnotationsProto
          .internal_static_google_cloud_visionai_v1_AppPlatformMetadata_descriptor;
    }
    /** The immutable singleton default instance of the target message type. */
    @java.lang.Override
    public com.google.cloud.visionai.v1.AppPlatformMetadata getDefaultInstanceForType() {
      return com.google.cloud.visionai.v1.AppPlatformMetadata.getDefaultInstance();
    }
    /** Builds the message, throwing if it is not initialized (never happens: no required fields). */
    @java.lang.Override
    public com.google.cloud.visionai.v1.AppPlatformMetadata build() {
      com.google.cloud.visionai.v1.AppPlatformMetadata result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds without the initialization check; copies only fields whose presence bit is set. */
    @java.lang.Override
    public com.google.cloud.visionai.v1.AppPlatformMetadata buildPartial() {
      com.google.cloud.visionai.v1.AppPlatformMetadata result =
          new com.google.cloud.visionai.v1.AppPlatformMetadata(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    /** Copies each builder field into {@code result} when its bit (1,2,4,8) is set. */
    private void buildPartial0(com.google.cloud.visionai.v1.AppPlatformMetadata result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.application_ = application_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.instanceId_ = instanceId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.node_ = node_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.processor_ = processor_;
      }
    }
    // Boilerplate overrides that narrow the superclass return type to Builder
    // for fluent chaining; all simply delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    /** Merges any Message: fast path for the exact type, reflective merge otherwise. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.visionai.v1.AppPlatformMetadata) {
        return mergeFrom((com.google.cloud.visionai.v1.AppPlatformMetadata) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /** Typed merge: proto3 semantics — only non-empty fields of {@code other} overwrite. */
    public Builder mergeFrom(com.google.cloud.visionai.v1.AppPlatformMetadata other) {
      if (other == com.google.cloud.visionai.v1.AppPlatformMetadata.getDefaultInstance())
        return this; // merging the default instance is a no-op
      if (!other.getApplication().isEmpty()) {
        application_ = other.application_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getInstanceId().isEmpty()) {
        instanceId_ = other.instanceId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getNode().isEmpty()) {
        node_ = other.node_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getProcessor().isEmpty()) {
        processor_ = other.processor_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    /** Always true: the message has no required fields. */
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /**
     * Wire-format merge. Tags 10/18/26/34 are (field_number << 3) | 2 —
     * length-delimited strings for fields 1–4. Unknown tags are preserved
     * via parseUnknownField; tag 0 or an end-group terminates the loop.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                application_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                instanceId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                node_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                processor_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged(); // notify parent builders even on parse failure
      } // finally
      return this;
    }
    // Presence bits: 0x1 application, 0x2 instanceId, 0x4 node, 0x8 processor.
    private int bitField0_;

    // String-or-ByteString holder; decoded lazily like the message-side field.
    private java.lang.Object application_ = "";

    /**
     * The application resource name.
     *
     * <code>string application = 1;</code>
     *
     * @return The application.
     */
    public java.lang.String getApplication() {
      java.lang.Object ref = application_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        application_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * The application resource name.
     *
     * <code>string application = 1;</code>
     *
     * @return The bytes for application.
     */
    public com.google.protobuf.ByteString getApplicationBytes() {
      java.lang.Object ref = application_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        application_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the application resource name.
     *
     * <code>string application = 1;</code>
     *
     * @param value The application to set.
     * @return This builder for chaining.
     */
    public Builder setApplication(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      application_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Clears the application resource name back to its default.
     *
     * <code>string application = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearApplication() {
      application_ = getDefaultInstance().getApplication();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Sets the application resource name from raw bytes (must be valid UTF-8).
     *
     * <code>string application = 1;</code>
     *
     * @param value The bytes for application to set.
     * @return This builder for chaining.
     */
    public Builder setApplicationBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      application_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // String-or-ByteString holder; decoded lazily like the message-side field.
    private java.lang.Object instanceId_ = "";

    /**
     * The instance resource id. Instance is the nested resource of application
     * under collection 'instances'.
     *
     * <code>string instance_id = 2;</code>
     *
     * @return The instanceId.
     */
    public java.lang.String getInstanceId() {
      java.lang.Object ref = instanceId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        instanceId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * The instance resource id. Instance is the nested resource of application
     * under collection 'instances'.
     *
     * <code>string instance_id = 2;</code>
     *
     * @return The bytes for instanceId.
     */
    public com.google.protobuf.ByteString getInstanceIdBytes() {
      java.lang.Object ref = instanceId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        instanceId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the instance resource id.
     *
     * <code>string instance_id = 2;</code>
     *
     * @param value The instanceId to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      instanceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Clears the instance resource id back to its default.
     *
     * <code>string instance_id = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInstanceId() {
      instanceId_ = getDefaultInstance().getInstanceId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Sets the instance resource id from raw bytes (must be valid UTF-8).
     *
     * <code>string instance_id = 2;</code>
     *
     * @param value The bytes for instanceId to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      instanceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // String-or-ByteString holder; decoded lazily like the message-side field.
    private java.lang.Object node_ = "";

    /**
     * The node name of the application graph.
     *
     * <code>string node = 3;</code>
     *
     * @return The node.
     */
    public java.lang.String getNode() {
      java.lang.Object ref = node_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        node_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * The node name of the application graph.
     *
     * <code>string node = 3;</code>
     *
     * @return The bytes for node.
     */
    public com.google.protobuf.ByteString getNodeBytes() {
      java.lang.Object ref = node_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        node_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the node name of the application graph.
     *
     * <code>string node = 3;</code>
     *
     * @param value The node to set.
     * @return This builder for chaining.
     */
    public Builder setNode(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      node_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Clears the node name back to its default.
     *
     * <code>string node = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNode() {
      node_ = getDefaultInstance().getNode();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * Sets the node name from raw bytes (must be valid UTF-8).
     *
     * <code>string node = 3;</code>
     *
     * @param value The bytes for node to set.
     * @return This builder for chaining.
     */
    public Builder setNodeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      node_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
private java.lang.Object processor_ = "";
/**
*
*
* <pre>
* The referred processor resource name of the application node.
* </pre>
*
* <code>string processor = 4;</code>
*
* @return The processor.
*/
public java.lang.String getProcessor() {
java.lang.Object ref = processor_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
processor_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The referred processor resource name of the application node.
* </pre>
*
* <code>string processor = 4;</code>
*
* @return The bytes for processor.
*/
public com.google.protobuf.ByteString getProcessorBytes() {
java.lang.Object ref = processor_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
processor_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The referred processor resource name of the application node.
* </pre>
*
* <code>string processor = 4;</code>
*
* @param value The processor to set.
* @return This builder for chaining.
*/
public Builder setProcessor(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
processor_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The referred processor resource name of the application node.
* </pre>
*
* <code>string processor = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearProcessor() {
processor_ = getDefaultInstance().getProcessor();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* The referred processor resource name of the application node.
* </pre>
*
* <code>string processor = 4;</code>
*
* @param value The bytes for processor to set.
* @return This builder for chaining.
*/
public Builder setProcessorBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
processor_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.AppPlatformMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.AppPlatformMetadata)
// Shared immutable default instance; all unset message references point here.
private static final com.google.cloud.visionai.v1.AppPlatformMetadata DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.AppPlatformMetadata();
}

public static com.google.cloud.visionai.v1.AppPlatformMetadata getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to Builder.mergeFrom; on failure it attaches the
// partially-built message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<AppPlatformMetadata> PARSER =
    new com.google.protobuf.AbstractParser<AppPlatformMetadata>() {
      @java.lang.Override
      public AppPlatformMetadata parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a protobuf-typed exception.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<AppPlatformMetadata> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<AppPlatformMetadata> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.visionai.v1.AppPlatformMetadata getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/rank_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Record message for
* [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.RankingRecord}
*/
public final class RankingRecord extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.RankingRecord)
RankingRecordOrBuilder {
private static final long serialVersionUID = 0L;

// Use RankingRecord.newBuilder() to construct.
private RankingRecord(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor initializes proto3 string fields to "".
private RankingRecord() {
  id_ = "";
  title_ = "";
  content_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new RankingRecord();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.discoveryengine.v1.RankServiceProto
      .internal_static_google_cloud_discoveryengine_v1_RankingRecord_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.discoveryengine.v1.RankServiceProto
      .internal_static_google_cloud_discoveryengine_v1_RankingRecord_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.discoveryengine.v1.RankingRecord.class,
          com.google.cloud.discoveryengine.v1.RankingRecord.Builder.class);
}

public static final int ID_FIELD_NUMBER = 1;

// Stored as Object so the String / ByteString form can be cached lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object id_ = "";

/**
 *
 *
 * <pre>
 * The unique ID to represent the record.
 * </pre>
 *
 * <code>string id = 1;</code>
 *
 * @return The id.
 */
@java.lang.Override
public java.lang.String getId() {
  java.lang.Object ref = id_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached ByteString once and memoize the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    id_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * The unique ID to represent the record.
 * </pre>
 *
 * <code>string id = 1;</code>
 *
 * @return The bytes for id.
 */
@java.lang.Override
public com.google.protobuf.ByteString getIdBytes() {
  java.lang.Object ref = id_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    id_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int TITLE_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
private volatile java.lang.Object title_ = "";

/**
 *
 *
 * <pre>
 * The title of the record. Empty by default.
 * At least one of
 * [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
 * [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
 * set otherwise an INVALID_ARGUMENT error is thrown.
 * </pre>
 *
 * <code>string title = 2;</code>
 *
 * @return The title.
 */
@java.lang.Override
public java.lang.String getTitle() {
  java.lang.Object ref = title_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    title_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * The title of the record. Empty by default.
 * At least one of
 * [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
 * [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
 * set otherwise an INVALID_ARGUMENT error is thrown.
 * </pre>
 *
 * <code>string title = 2;</code>
 *
 * @return The bytes for title.
 */
@java.lang.Override
public com.google.protobuf.ByteString getTitleBytes() {
  java.lang.Object ref = title_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    title_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int CONTENT_FIELD_NUMBER = 3;

@SuppressWarnings("serial")
private volatile java.lang.Object content_ = "";

/**
 *
 *
 * <pre>
 * The content of the record. Empty by default.
 * At least one of
 * [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
 * [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
 * set otherwise an INVALID_ARGUMENT error is thrown.
 * </pre>
 *
 * <code>string content = 3;</code>
 *
 * @return The content.
 */
@java.lang.Override
public java.lang.String getContent() {
  java.lang.Object ref = content_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    content_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * The content of the record. Empty by default.
 * At least one of
 * [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
 * [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
 * set otherwise an INVALID_ARGUMENT error is thrown.
 * </pre>
 *
 * <code>string content = 3;</code>
 *
 * @return The bytes for content.
 */
@java.lang.Override
public com.google.protobuf.ByteString getContentBytes() {
  java.lang.Object ref = content_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    content_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int SCORE_FIELD_NUMBER = 4;

private float score_ = 0F;

/**
 *
 *
 * <pre>
 * The score of this record based on the given query and selected model.
 * The score will be rounded to 2 decimal places. If the score is close to 0,
 * it will be rounded to 0.0001 to avoid returning unset.
 * </pre>
 *
 * <code>float score = 4;</code>
 *
 * @return The score.
 */
@java.lang.Override
public float getScore() {
  return score_;
}

// Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // proto3 semantics: fields at their default value are not serialized.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(title_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, title_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, content_);
  }
  // Raw-bit comparison: only the exact bit pattern of +0.0f is treated as unset.
  if (java.lang.Float.floatToRawIntBits(score_) != 0) {
    output.writeFloat(4, score_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  // Mirrors writeTo: size only the fields that would actually be emitted.
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(title_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, title_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, content_);
  }
  if (java.lang.Float.floatToRawIntBits(score_) != 0) {
    size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, score_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.discoveryengine.v1.RankingRecord)) {
    return super.equals(obj);
  }
  com.google.cloud.discoveryengine.v1.RankingRecord other =
      (com.google.cloud.discoveryengine.v1.RankingRecord) obj;

  if (!getId().equals(other.getId())) return false;
  if (!getTitle().equals(other.getTitle())) return false;
  if (!getContent().equals(other.getContent())) return false;
  // Floats compared by bit pattern (floatToIntBits), consistent with hashCode.
  if (java.lang.Float.floatToIntBits(getScore())
      != java.lang.Float.floatToIntBits(other.getScore())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 means "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + ID_FIELD_NUMBER;
  hash = (53 * hash) + getId().hashCode();
  hash = (37 * hash) + TITLE_FIELD_NUMBER;
  hash = (53 * hash) + getTitle().hashCode();
  hash = (37 * hash) + CONTENT_FIELD_NUMBER;
  hash = (53 * hash) + getContent().hashCode();
  hash = (37 * hash) + SCORE_FIELD_NUMBER;
  hash = (53 * hash) + java.lang.Float.floatToIntBits(getScore());
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// ----- Standard generated parseFrom overloads; all delegate to PARSER. -----
public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.RankingRecord parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// ----- Builder factory methods. -----
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.discoveryengine.v1.RankingRecord prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom when converting the default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Record message for
* [RankService.Rank][google.cloud.discoveryengine.v1.RankService.Rank] method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.RankingRecord}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.RankingRecord)
com.google.cloud.discoveryengine.v1.RankingRecordOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1_RankingRecord_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1_RankingRecord_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.RankingRecord.class,
com.google.cloud.discoveryengine.v1.RankingRecord.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1.RankingRecord.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
id_ = "";
title_ = "";
content_ = "";
score_ = 0F;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1_RankingRecord_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.RankingRecord getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1.RankingRecord.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.RankingRecord build() {
com.google.cloud.discoveryengine.v1.RankingRecord result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.RankingRecord buildPartial() {
com.google.cloud.discoveryengine.v1.RankingRecord result =
new com.google.cloud.discoveryengine.v1.RankingRecord(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.discoveryengine.v1.RankingRecord result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.id_ = id_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.title_ = title_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.content_ = content_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.score_ = score_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1.RankingRecord) {
return mergeFrom((com.google.cloud.discoveryengine.v1.RankingRecord) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1.RankingRecord other) {
if (other == com.google.cloud.discoveryengine.v1.RankingRecord.getDefaultInstance())
return this;
if (!other.getId().isEmpty()) {
id_ = other.id_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getTitle().isEmpty()) {
title_ = other.title_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getContent().isEmpty()) {
content_ = other.content_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.getScore() != 0F) {
setScore(other.getScore());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
id_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
title_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
content_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 37:
{
score_ = input.readFloat();
bitField0_ |= 0x00000008;
break;
} // case 37
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object id_ = "";
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The id.
*/
public java.lang.String getId() {
java.lang.Object ref = id_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The bytes for id.
*/
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @param value The id to set.
* @return This builder for chaining.
*/
public Builder setId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearId() {
id_ = getDefaultInstance().getId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @param value The bytes for id to set.
* @return This builder for chaining.
*/
public Builder setIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object title_ = "";
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return The title.
*/
public java.lang.String getTitle() {
java.lang.Object ref = title_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
title_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return The bytes for title.
*/
public com.google.protobuf.ByteString getTitleBytes() {
java.lang.Object ref = title_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
title_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @param value The title to set.
* @return This builder for chaining.
*/
public Builder setTitle(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
title_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearTitle() {
title_ = getDefaultInstance().getTitle();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @param value The bytes for title to set.
* @return This builder for chaining.
*/
public Builder setTitleBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
title_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object content_ = "";
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return The content.
*/
public java.lang.String getContent() {
java.lang.Object ref = content_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
content_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return The bytes for content.
*/
public com.google.protobuf.ByteString getContentBytes() {
java.lang.Object ref = content_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
content_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @param value The content to set.
* @return This builder for chaining.
*/
public Builder setContent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
content_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return This builder for chaining.
*/
    public Builder clearContent() {
      // Reset to the proto default value and drop the has-bit for field 3.
      content_ = getDefaultInstance().getContent();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1.RankingRecord.content] should be
* set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @param value The bytes for content to set.
* @return This builder for chaining.
*/
    public Builder setContentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject byte strings that are not valid UTF-8 before storing them.
      checkByteStringIsUtf8(value);
      content_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
private float score_;
/**
*
*
* <pre>
* The score of this record based on the given query and selected model.
* The score will be rounded to 2 decimal places. If the score is close to 0,
* it will be rounded to 0.0001 to avoid returning unset.
* </pre>
*
* <code>float score = 4;</code>
*
* @return The score.
*/
    @java.lang.Override
    public float getScore() {
      // Plain field read; returns 0F when the field was never set.
      return score_;
    }
/**
*
*
* <pre>
* The score of this record based on the given query and selected model.
* The score will be rounded to 2 decimal places. If the score is close to 0,
* it will be rounded to 0.0001 to avoid returning unset.
* </pre>
*
* <code>float score = 4;</code>
*
* @param value The score to set.
* @return This builder for chaining.
*/
    public Builder setScore(float value) {
      score_ = value;
      bitField0_ |= 0x00000008; // mark score (bit for field 4) as explicitly set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The score of this record based on the given query and selected model.
* The score will be rounded to 2 decimal places. If the score is close to 0,
* it will be rounded to 0.0001 to avoid returning unset.
* </pre>
*
* <code>float score = 4;</code>
*
* @return This builder for chaining.
*/
    public Builder clearScore() {
      bitField0_ = (bitField0_ & ~0x00000008); // drop the has-bit for score
      score_ = 0F; // proto default for a float field
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Straight pass-through to the GeneratedMessageV3.Builder implementation.
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.RankingRecord)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.RankingRecord)
  // Shared immutable default instance; also the prototype behind newBuilder().
  private static final com.google.cloud.discoveryengine.v1.RankingRecord DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.RankingRecord();
  }

  public static com.google.cloud.discoveryengine.v1.RankingRecord getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<RankingRecord> PARSER =
      new com.google.protobuf.AbstractParser<RankingRecord>() {
        @java.lang.Override
        public RankingRecord parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<RankingRecord> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RankingRecord> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.RankingRecord getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1/accounts.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1;
/**
*
*
* <pre>
* Request message for the `UpdateAccount` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateAccountRequest}
*/
public final class UpdateAccountRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.UpdateAccountRequest)
UpdateAccountRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateAccountRequest.newBuilder() to construct.
  // Instances are created only via the Builder or the parser, never directly.
  private UpdateAccountRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateAccountRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to allocate fresh instances.
    return new UpdateAccountRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1_UpdateAccountRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1_UpdateAccountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1.UpdateAccountRequest.class,
com.google.shopping.merchant.accounts.v1.UpdateAccountRequest.Builder.class);
}
private int bitField0_;
public static final int ACCOUNT_FIELD_NUMBER = 1;
private com.google.shopping.merchant.accounts.v1.Account account_;
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the account field is set.
*/
@java.lang.Override
public boolean hasAccount() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The account.
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.Account getAccount() {
return account_ == null
? com.google.shopping.merchant.accounts.v1.Account.getDefaultInstance()
: account_;
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.AccountOrBuilder getAccountOrBuilder() {
return account_ == null
? com.google.shopping.merchant.accounts.v1.Account.getDefaultInstance()
: account_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
  private byte memoizedIsInitialized = -1; // -1 unknown, 0 false, 1 true

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Nothing is validated here, so the answer is always true; memoize it.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize only fields whose has-bits are set, in field-number order,
    // then append any unknown fields preserved from parsing.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getAccount());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // -1 is the "not yet computed" sentinel
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAccount());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size; // cache for subsequent calls
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.accounts.v1.UpdateAccountRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1.UpdateAccountRequest other =
        (com.google.shopping.merchant.accounts.v1.UpdateAccountRequest) obj;
    // Messages are equal only when field presence AND values match for every field.
    if (hasAccount() != other.hasAccount()) return false;
    if (hasAccount()) {
      if (!getAccount().equals(other.getAccount())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not computed" sentinel; a genuine hash of 0 is just recomputed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only fields that are present contribute, keeping hashCode consistent with equals.
    if (hasAccount()) {
      hash = (37 * hash) + ACCOUNT_FIELD_NUMBER;
      hash = (53 * hash) + getAccount().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.accounts.v1.UpdateAccountRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the `UpdateAccount` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateAccountRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.UpdateAccountRequest)
com.google.shopping.merchant.accounts.v1.UpdateAccountRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1_UpdateAccountRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1_UpdateAccountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1.UpdateAccountRequest.class,
com.google.shopping.merchant.accounts.v1.UpdateAccountRequest.Builder.class);
}
// Construct using com.google.shopping.merchant.accounts.v1.UpdateAccountRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getAccountFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      account_ = null;
      if (accountBuilder_ != null) {
        // Detach the nested builder so it no longer reports changes to this parent.
        accountBuilder_.dispose();
        accountBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.accounts.v1.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1_UpdateAccountRequest_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.UpdateAccountRequest
getDefaultInstanceForType() {
return com.google.shopping.merchant.accounts.v1.UpdateAccountRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.UpdateAccountRequest build() {
com.google.shopping.merchant.accounts.v1.UpdateAccountRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UpdateAccountRequest buildPartial() {
      com.google.shopping.merchant.accounts.v1.UpdateAccountRequest result =
          new com.google.shopping.merchant.accounts.v1.UpdateAccountRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies every field whose has-bit is set from this builder into result.
    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1.UpdateAccountRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        // Prefer the nested builder's current state when one exists.
        result.account_ = accountBuilder_ == null ? account_ : accountBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.merchant.accounts.v1.UpdateAccountRequest) {
return mergeFrom((com.google.shopping.merchant.accounts.v1.UpdateAccountRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.UpdateAccountRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.shopping.merchant.accounts.v1.UpdateAccountRequest.getDefaultInstance())
        return this;
      if (other.hasAccount()) {
        mergeAccount(other.getAccount());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // end of stream
              done = true;
              break;
            case 10:
              {
                // Field 1 (account), wire type 2 (length-delimited): tag = (1 << 3) | 2.
                input.readMessage(getAccountFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (update_mask), wire type 2: tag = (2 << 3) | 2.
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Preserve unrecognized fields rather than dropping them.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.shopping.merchant.accounts.v1.Account account_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1.Account,
com.google.shopping.merchant.accounts.v1.Account.Builder,
com.google.shopping.merchant.accounts.v1.AccountOrBuilder>
accountBuilder_;
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the account field is set.
*/
public boolean hasAccount() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The account.
*/
public com.google.shopping.merchant.accounts.v1.Account getAccount() {
if (accountBuilder_ == null) {
return account_ == null
? com.google.shopping.merchant.accounts.v1.Account.getDefaultInstance()
: account_;
} else {
return accountBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAccount(com.google.shopping.merchant.accounts.v1.Account value) {
if (accountBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
account_ = value;
} else {
accountBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAccount(
com.google.shopping.merchant.accounts.v1.Account.Builder builderForValue) {
if (accountBuilder_ == null) {
account_ = builderForValue.build();
} else {
accountBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeAccount(com.google.shopping.merchant.accounts.v1.Account value) {
      if (accountBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && account_ != null
            && account_ != com.google.shopping.merchant.accounts.v1.Account.getDefaultInstance()) {
          // A non-default account is already present: merge field-by-field.
          getAccountBuilder().mergeFrom(value);
        } else {
          // Otherwise simply adopt the incoming message.
          account_ = value;
        }
      } else {
        accountBuilder_.mergeFrom(value);
      }
      if (account_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearAccount() {
bitField0_ = (bitField0_ & ~0x00000001);
account_ = null;
if (accountBuilder_ != null) {
accountBuilder_.dispose();
accountBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1.Account.Builder getAccountBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAccountFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1.AccountOrBuilder getAccountOrBuilder() {
if (accountBuilder_ != null) {
return accountBuilder_.getMessageOrBuilder();
} else {
return account_ == null
? com.google.shopping.merchant.accounts.v1.Account.getDefaultInstance()
: account_;
}
}
/**
*
*
* <pre>
* Required. The new version of the account.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1.Account,
com.google.shopping.merchant.accounts.v1.Account.Builder,
com.google.shopping.merchant.accounts.v1.AccountOrBuilder>
getAccountFieldBuilder() {
if (accountBuilder_ == null) {
accountBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1.Account,
com.google.shopping.merchant.accounts.v1.Account.Builder,
com.google.shopping.merchant.accounts.v1.AccountOrBuilder>(
getAccount(), getParentForChildren(), isClean());
account_ = null;
}
return accountBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          // A non-default mask is already present: merge rather than replace.
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `account_name`
* - `adult_content`
* - `language_code`
* - `time_zone`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
 *
 *
 * <pre>
 * Optional. List of fields being updated.
 *
 * The following fields are supported (in both `snake_case` and
 * `lowerCamelCase`):
 *
 * - `account_name`
 * - `adult_content`
 * - `language_code`
 * - `time_zone`
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
// Lazily creates the SingleFieldBuilderV3 for update_mask; once created it
// owns the field state, so the plain updateMask_ reference is nulled out.
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
// Replaces (rather than merges) the unknown-field set; delegates to the base builder.
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
// Merges additional unrecognized fields into the existing set; delegates to the base builder.
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.UpdateAccountRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.UpdateAccountRequest)
// Singleton default instance shared by all callers; created eagerly at class load.
private static final com.google.shopping.merchant.accounts.v1.UpdateAccountRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.UpdateAccountRequest();
}
public static com.google.shopping.merchant.accounts.v1.UpdateAccountRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire parser: builds via the Builder's mergeFrom and always attaches the
// partially-parsed message to any InvalidProtocolBufferException it throws.
private static final com.google.protobuf.Parser<UpdateAccountRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateAccountRequest>() {
@java.lang.Override
public UpdateAccountRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<UpdateAccountRequest> parser() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; same shared parser.
public com.google.protobuf.Parser<UpdateAccountRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; same singleton default.
public com.google.shopping.merchant.accounts.v1.UpdateAccountRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,838 | java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/ListServicesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/service_service.proto
// Protobuf Java Version: 3.25.8
package com.google.monitoring.v3;
// NOTE(review): machine-generated protobuf code (see the "DO NOT EDIT" file header).
// Do not hand-modify logic here — change google/monitoring/v3/service_service.proto
// and regenerate instead. Comments below only annotate the generated structure.
/**
 *
 *
 * <pre>
 * The `ListServices` response.
 * </pre>
 *
 * Protobuf type {@code google.monitoring.v3.ListServicesResponse}
 */
public final class ListServicesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.ListServicesResponse)
ListServicesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListServicesResponse.newBuilder() to construct.
private ListServicesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListServicesResponse() {
services_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListServicesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.ServiceMonitoringServiceProto
.internal_static_google_monitoring_v3_ListServicesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.ServiceMonitoringServiceProto
.internal_static_google_monitoring_v3_ListServicesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.ListServicesResponse.class,
com.google.monitoring.v3.ListServicesResponse.Builder.class);
}
public static final int SERVICES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.monitoring.v3.Service> services_;
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.monitoring.v3.Service> getServicesList() {
return services_;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.monitoring.v3.ServiceOrBuilder>
getServicesOrBuilderList() {
return services_;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
@java.lang.Override
public int getServicesCount() {
return services_.size();
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
@java.lang.Override
public com.google.monitoring.v3.Service getServices(int index) {
return services_.get(index);
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
@java.lang.Override
public com.google.monitoring.v3.ServiceOrBuilder getServicesOrBuilder(int index) {
return services_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
// nextPageToken_ may hold either a String or a lazily-decoded ByteString;
// decode once and cache the String form.
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
// Mirror of getNextPageToken(): converts and caches the ByteString form.
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes services (field 1) then next_page_token (field 2), then unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < services_.size(); i++) {
output.writeMessage(1, services_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Computes and memoizes the serialized byte size (memoizedSize == -1 means unknown).
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < services_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, services_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.monitoring.v3.ListServicesResponse)) {
return super.equals(obj);
}
com.google.monitoring.v3.ListServicesResponse other =
(com.google.monitoring.v3.ListServicesResponse) obj;
if (!getServicesList().equals(other.getServicesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Field-number-tagged hash, memoized after first computation (0 means not computed).
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getServicesCount() > 0) {
hash = (37 * hash) + SERVICES_FIELD_NUMBER;
hash = (53 * hash) + getServicesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.ListServicesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.ListServicesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.ListServicesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.monitoring.v3.ListServicesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * The `ListServices` response.
 * </pre>
 *
 * Protobuf type {@code google.monitoring.v3.ListServicesResponse}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.ListServicesResponse)
com.google.monitoring.v3.ListServicesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.ServiceMonitoringServiceProto
.internal_static_google_monitoring_v3_ListServicesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.ServiceMonitoringServiceProto
.internal_static_google_monitoring_v3_ListServicesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.ListServicesResponse.class,
com.google.monitoring.v3.ListServicesResponse.Builder.class);
}
// Construct using com.google.monitoring.v3.ListServicesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
} else {
services_ = null;
servicesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.monitoring.v3.ServiceMonitoringServiceProto
.internal_static_google_monitoring_v3_ListServicesResponse_descriptor;
}
@java.lang.Override
public com.google.monitoring.v3.ListServicesResponse getDefaultInstanceForType() {
return com.google.monitoring.v3.ListServicesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.v3.ListServicesResponse build() {
com.google.monitoring.v3.ListServicesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.monitoring.v3.ListServicesResponse buildPartial() {
com.google.monitoring.v3.ListServicesResponse result =
new com.google.monitoring.v3.ListServicesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated services field; when built from the builder-side list,
// the list is frozen (unmodifiable) and its mutability bit cleared.
private void buildPartialRepeatedFields(com.google.monitoring.v3.ListServicesResponse result) {
if (servicesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
services_ = java.util.Collections.unmodifiableList(services_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.services_ = services_;
} else {
result.services_ = servicesBuilder_.build();
}
}
// Copies scalar fields whose presence bits are set (bit 0x2 = next_page_token).
private void buildPartial0(com.google.monitoring.v3.ListServicesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.ListServicesResponse) {
return mergeFrom((com.google.monitoring.v3.ListServicesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.monitoring.v3.ListServicesResponse other) {
if (other == com.google.monitoring.v3.ListServicesResponse.getDefaultInstance()) return this;
if (servicesBuilder_ == null) {
if (!other.services_.isEmpty()) {
if (services_.isEmpty()) {
// Our list is empty: share the other message's (immutable) list directly.
services_ = other.services_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureServicesIsMutable();
services_.addAll(other.services_);
}
onChanged();
}
} else {
if (!other.services_.isEmpty()) {
if (servicesBuilder_.isEmpty()) {
// Builder path with no local entries: adopt the other list and re-create
// the field builder only when alwaysUseFieldBuilders is enabled.
servicesBuilder_.dispose();
servicesBuilder_ = null;
services_ = other.services_;
bitField0_ = (bitField0_ & ~0x00000001);
servicesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getServicesFieldBuilder()
: null;
} else {
servicesBuilder_.addAllMessages(other.services_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Streaming merge: dispatches on wire tags (10 = field 1 message, 18 = field 2 string);
// unrecognized tags go to the unknown-field set, and tag 0 / end-group stops the loop.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.monitoring.v3.Service m =
input.readMessage(com.google.monitoring.v3.Service.parser(), extensionRegistry);
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(m);
} else {
servicesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Bit 0x1: services_ list is privately owned and mutable.
// Bit 0x2: next_page_token has been explicitly set.
private int bitField0_;
private java.util.List<com.google.monitoring.v3.Service> services_ =
java.util.Collections.emptyList();
// Copy-on-write guard: clones the (possibly shared/immutable) list before mutation.
private void ensureServicesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
services_ = new java.util.ArrayList<com.google.monitoring.v3.Service>(services_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.v3.Service,
com.google.monitoring.v3.Service.Builder,
com.google.monitoring.v3.ServiceOrBuilder>
servicesBuilder_;
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public java.util.List<com.google.monitoring.v3.Service> getServicesList() {
if (servicesBuilder_ == null) {
return java.util.Collections.unmodifiableList(services_);
} else {
return servicesBuilder_.getMessageList();
}
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public int getServicesCount() {
if (servicesBuilder_ == null) {
return services_.size();
} else {
return servicesBuilder_.getCount();
}
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public com.google.monitoring.v3.Service getServices(int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessage(index);
}
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder setServices(int index, com.google.monitoring.v3.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.set(index, value);
onChanged();
} else {
servicesBuilder_.setMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder setServices(
int index, com.google.monitoring.v3.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.set(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder addServices(com.google.monitoring.v3.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(value);
onChanged();
} else {
servicesBuilder_.addMessage(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder addServices(int index, com.google.monitoring.v3.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(index, value);
onChanged();
} else {
servicesBuilder_.addMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder addServices(com.google.monitoring.v3.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder addServices(
int index, com.google.monitoring.v3.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder addAllServices(
java.lang.Iterable<? extends com.google.monitoring.v3.Service> values) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, services_);
onChanged();
} else {
servicesBuilder_.addAllMessages(values);
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder clearServices() {
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
servicesBuilder_.clear();
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public Builder removeServices(int index) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.remove(index);
onChanged();
} else {
servicesBuilder_.remove(index);
}
return this;
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public com.google.monitoring.v3.Service.Builder getServicesBuilder(int index) {
return getServicesFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public com.google.monitoring.v3.ServiceOrBuilder getServicesOrBuilder(int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessageOrBuilder(index);
}
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public java.util.List<? extends com.google.monitoring.v3.ServiceOrBuilder>
getServicesOrBuilderList() {
if (servicesBuilder_ != null) {
return servicesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(services_);
}
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public com.google.monitoring.v3.Service.Builder addServicesBuilder() {
return getServicesFieldBuilder()
.addBuilder(com.google.monitoring.v3.Service.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public com.google.monitoring.v3.Service.Builder addServicesBuilder(int index) {
return getServicesFieldBuilder()
.addBuilder(index, com.google.monitoring.v3.Service.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The `Service`s matching the specified filter.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Service services = 1;</code>
 */
public java.util.List<com.google.monitoring.v3.Service.Builder> getServicesBuilderList() {
return getServicesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3; once created it owns the list state,
// so the plain services_ reference is nulled out.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.v3.Service,
com.google.monitoring.v3.Service.Builder,
com.google.monitoring.v3.ServiceOrBuilder>
getServicesFieldBuilder() {
if (servicesBuilder_ == null) {
servicesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.v3.Service,
com.google.monitoring.v3.Service.Builder,
com.google.monitoring.v3.ServiceOrBuilder>(
services_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
services_ = null;
}
return servicesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.ListServicesResponse)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.ListServicesResponse)
// Singleton default instance shared by all callers; created eagerly at class load.
private static final com.google.monitoring.v3.ListServicesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.v3.ListServicesResponse();
}
public static com.google.monitoring.v3.ListServicesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire parser: builds via the Builder's mergeFrom and attaches the partially
// parsed message to any InvalidProtocolBufferException it throws.
private static final com.google.protobuf.Parser<ListServicesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListServicesResponse>() {
@java.lang.Override
public ListServicesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListServicesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListServicesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.monitoring.v3.ListServicesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,795 | java-asset/proto-google-cloud-asset-v1p7beta1/src/main/java/com/google/cloud/asset/v1p7beta1/GcsDestination.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1p7beta1/asset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.asset.v1p7beta1;
/**
*
*
* <pre>
* A Cloud Storage location.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1p7beta1.GcsDestination}
*/
public final class GcsDestination extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.asset.v1p7beta1.GcsDestination)
    GcsDestinationOrBuilder {
  // NOTE(review): protoc-generated class — do not hand-edit; regenerate from
  // google/cloud/asset/v1p7beta1/asset_service.proto instead.
  private static final long serialVersionUID = 0L;
  // Use GcsDestination.newBuilder() to construct.
  private GcsDestination(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GcsDestination() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GcsDestination();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.asset.v1p7beta1.AssetServiceProto
        .internal_static_google_cloud_asset_v1p7beta1_GcsDestination_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.asset.v1p7beta1.AssetServiceProto
        .internal_static_google_cloud_asset_v1p7beta1_GcsDestination_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.asset.v1p7beta1.GcsDestination.class,
            com.google.cloud.asset.v1p7beta1.GcsDestination.Builder.class);
  }
  // Discriminator for the object_uri oneof: 0 = unset, 1 = uri, 2 = uri_prefix.
  private int objectUriCase_ = 0;
  // Shared backing store for whichever oneof member is set; holds either a
  // java.lang.String or a ByteString (lazily decoded on first String access).
  @SuppressWarnings("serial")
  private java.lang.Object objectUri_;
  public enum ObjectUriCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    URI(1),
    URI_PREFIX(2),
    OBJECTURI_NOT_SET(0);
    private final int value;
    private ObjectUriCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ObjectUriCase valueOf(int value) {
      return forNumber(value);
    }
    public static ObjectUriCase forNumber(int value) {
      switch (value) {
        case 1:
          return URI;
        case 2:
          return URI_PREFIX;
        case 0:
          return OBJECTURI_NOT_SET;
        default:
          // Unknown case number; this class only ever stores 0, 1 or 2.
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public ObjectUriCase getObjectUriCase() {
    return ObjectUriCase.forNumber(objectUriCase_);
  }
  public static final int URI_FIELD_NUMBER = 1;
  /**
   *
   *
   * <pre>
   * The URI of the Cloud Storage object. It's the same URI that is used by
   * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
   * Editing Object
   * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
   * for more information.
   * </pre>
   *
   * <code>string uri = 1;</code>
   *
   * @return Whether the uri field is set.
   */
  public boolean hasUri() {
    return objectUriCase_ == 1;
  }
  /**
   *
   *
   * <pre>
   * The URI of the Cloud Storage object. It's the same URI that is used by
   * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
   * Editing Object
   * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
   * for more information.
   * </pre>
   *
   * <code>string uri = 1;</code>
   *
   * @return The uri.
   */
  public java.lang.String getUri() {
    java.lang.Object ref = "";
    if (objectUriCase_ == 1) {
      ref = objectUri_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String back into the oneof slot (lazy UTF-8 decode),
      // but only if this oneof case is still the active one.
      if (objectUriCase_ == 1) {
        objectUri_ = s;
      }
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The URI of the Cloud Storage object. It's the same URI that is used by
   * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
   * Editing Object
   * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
   * for more information.
   * </pre>
   *
   * <code>string uri = 1;</code>
   *
   * @return The bytes for uri.
   */
  public com.google.protobuf.ByteString getUriBytes() {
    java.lang.Object ref = "";
    if (objectUriCase_ == 1) {
      ref = objectUri_;
    }
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (objectUriCase_ == 1) {
        objectUri_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int URI_PREFIX_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * The URI prefix of all generated Cloud Storage objects. Example:
   * "gs://bucket_name/object_name_prefix". Each object URI is in format:
   * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
   * contains assets for that type. <shard number> starts from 0. Example:
   * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
   * the first shard of output objects containing all
   * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
   * returned if file with the same name "gs://bucket_name/object_name_prefix"
   * already exists.
   * </pre>
   *
   * <code>string uri_prefix = 2;</code>
   *
   * @return Whether the uriPrefix field is set.
   */
  public boolean hasUriPrefix() {
    return objectUriCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * The URI prefix of all generated Cloud Storage objects. Example:
   * "gs://bucket_name/object_name_prefix". Each object URI is in format:
   * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
   * contains assets for that type. <shard number> starts from 0. Example:
   * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
   * the first shard of output objects containing all
   * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
   * returned if file with the same name "gs://bucket_name/object_name_prefix"
   * already exists.
   * </pre>
   *
   * <code>string uri_prefix = 2;</code>
   *
   * @return The uriPrefix.
   */
  public java.lang.String getUriPrefix() {
    java.lang.Object ref = "";
    if (objectUriCase_ == 2) {
      ref = objectUri_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Same lazy decode-and-cache pattern as getUri(), keyed on case 2.
      if (objectUriCase_ == 2) {
        objectUri_ = s;
      }
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The URI prefix of all generated Cloud Storage objects. Example:
   * "gs://bucket_name/object_name_prefix". Each object URI is in format:
   * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
   * contains assets for that type. <shard number> starts from 0. Example:
   * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
   * the first shard of output objects containing all
   * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
   * returned if file with the same name "gs://bucket_name/object_name_prefix"
   * already exists.
   * </pre>
   *
   * <code>string uri_prefix = 2;</code>
   *
   * @return The bytes for uriPrefix.
   */
  public com.google.protobuf.ByteString getUriPrefixBytes() {
    java.lang.Object ref = "";
    if (objectUriCase_ == 2) {
      ref = objectUri_;
    }
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (objectUriCase_ == 2) {
        objectUri_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Only the active oneof member (if any) is serialized.
    if (objectUriCase_ == 1) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, objectUri_);
    }
    if (objectUriCase_ == 2) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, objectUri_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (objectUriCase_ == 1) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, objectUri_);
    }
    if (objectUriCase_ == 2) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, objectUri_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.asset.v1p7beta1.GcsDestination)) {
      return super.equals(obj);
    }
    com.google.cloud.asset.v1p7beta1.GcsDestination other =
        (com.google.cloud.asset.v1p7beta1.GcsDestination) obj;
    // Oneof case must match before comparing the active member's value.
    if (!getObjectUriCase().equals(other.getObjectUriCase())) return false;
    switch (objectUriCase_) {
      case 1:
        if (!getUri().equals(other.getUri())) return false;
        break;
      case 2:
        if (!getUriPrefix().equals(other.getUriPrefix())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (objectUriCase_) {
      case 1:
        hash = (37 * hash) + URI_FIELD_NUMBER;
        hash = (53 * hash) + getUri().hashCode();
        break;
      case 2:
        hash = (37 * hash) + URI_PREFIX_FIELD_NUMBER;
        hash = (53 * hash) + getUriPrefix().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER.
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.asset.v1p7beta1.GcsDestination prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A Cloud Storage location.
   * </pre>
   *
   * Protobuf type {@code google.cloud.asset.v1p7beta1.GcsDestination}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.asset.v1p7beta1.GcsDestination)
      com.google.cloud.asset.v1p7beta1.GcsDestinationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.asset.v1p7beta1.AssetServiceProto
          .internal_static_google_cloud_asset_v1p7beta1_GcsDestination_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.asset.v1p7beta1.AssetServiceProto
          .internal_static_google_cloud_asset_v1p7beta1_GcsDestination_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.asset.v1p7beta1.GcsDestination.class,
              com.google.cloud.asset.v1p7beta1.GcsDestination.Builder.class);
    }
    // Construct using com.google.cloud.asset.v1p7beta1.GcsDestination.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Resetting the oneof returns the builder to the "not set" state.
      objectUriCase_ = 0;
      objectUri_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.asset.v1p7beta1.AssetServiceProto
          .internal_static_google_cloud_asset_v1p7beta1_GcsDestination_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.asset.v1p7beta1.GcsDestination getDefaultInstanceForType() {
      return com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.asset.v1p7beta1.GcsDestination build() {
      com.google.cloud.asset.v1p7beta1.GcsDestination result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.asset.v1p7beta1.GcsDestination buildPartial() {
      com.google.cloud.asset.v1p7beta1.GcsDestination result =
          new com.google.cloud.asset.v1p7beta1.GcsDestination(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.cloud.asset.v1p7beta1.GcsDestination result) {
      // Generated no-op: this message has no non-oneof fields, so the local
      // copy of bitField0_ is intentionally unused.
      int from_bitField0_ = bitField0_;
    }
    private void buildPartialOneofs(com.google.cloud.asset.v1p7beta1.GcsDestination result) {
      result.objectUriCase_ = objectUriCase_;
      result.objectUri_ = this.objectUri_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.asset.v1p7beta1.GcsDestination) {
        return mergeFrom((com.google.cloud.asset.v1p7beta1.GcsDestination) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.asset.v1p7beta1.GcsDestination other) {
      if (other == com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance())
        return this;
      // A set oneof member in `other` replaces whatever this builder held.
      switch (other.getObjectUriCase()) {
        case URI:
          {
            objectUriCase_ = 1;
            objectUri_ = other.objectUri_;
            onChanged();
            break;
          }
        case URI_PREFIX:
          {
            objectUriCase_ = 2;
            objectUri_ = other.objectUri_;
            onChanged();
            break;
          }
        case OBJECTURI_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                java.lang.String s = input.readStringRequireUtf8();
                objectUriCase_ = 1;
                objectUri_ = s;
                break;
              } // case 10
            case 18:
              {
                java.lang.String s = input.readStringRequireUtf8();
                objectUriCase_ = 2;
                objectUri_ = s;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int objectUriCase_ = 0;
    private java.lang.Object objectUri_;
    public ObjectUriCase getObjectUriCase() {
      return ObjectUriCase.forNumber(objectUriCase_);
    }
    public Builder clearObjectUri() {
      objectUriCase_ = 0;
      objectUri_ = null;
      onChanged();
      return this;
    }
    private int bitField0_;
    /**
     *
     *
     * <pre>
     * The URI of the Cloud Storage object. It's the same URI that is used by
     * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
     * Editing Object
     * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
     * for more information.
     * </pre>
     *
     * <code>string uri = 1;</code>
     *
     * @return Whether the uri field is set.
     */
    @java.lang.Override
    public boolean hasUri() {
      return objectUriCase_ == 1;
    }
    /**
     *
     *
     * <pre>
     * The URI of the Cloud Storage object. It's the same URI that is used by
     * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
     * Editing Object
     * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
     * for more information.
     * </pre>
     *
     * <code>string uri = 1;</code>
     *
     * @return The uri.
     */
    @java.lang.Override
    public java.lang.String getUri() {
      java.lang.Object ref = "";
      if (objectUriCase_ == 1) {
        ref = objectUri_;
      }
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (objectUriCase_ == 1) {
          objectUri_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The URI of the Cloud Storage object. It's the same URI that is used by
     * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
     * Editing Object
     * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
     * for more information.
     * </pre>
     *
     * <code>string uri = 1;</code>
     *
     * @return The bytes for uri.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getUriBytes() {
      java.lang.Object ref = "";
      if (objectUriCase_ == 1) {
        ref = objectUri_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (objectUriCase_ == 1) {
          objectUri_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The URI of the Cloud Storage object. It's the same URI that is used by
     * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
     * Editing Object
     * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
     * for more information.
     * </pre>
     *
     * <code>string uri = 1;</code>
     *
     * @param value The uri to set.
     * @return This builder for chaining.
     */
    public Builder setUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      objectUriCase_ = 1;
      objectUri_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The URI of the Cloud Storage object. It's the same URI that is used by
     * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
     * Editing Object
     * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
     * for more information.
     * </pre>
     *
     * <code>string uri = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUri() {
      // Only clears the oneof if uri is the member currently set.
      if (objectUriCase_ == 1) {
        objectUriCase_ = 0;
        objectUri_ = null;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The URI of the Cloud Storage object. It's the same URI that is used by
     * gsutil. Example: "gs://bucket_name/object_name". See [Viewing and
     * Editing Object
     * Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata)
     * for more information.
     * </pre>
     *
     * <code>string uri = 1;</code>
     *
     * @param value The bytes for uri to set.
     * @return This builder for chaining.
     */
    public Builder setUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      objectUriCase_ = 1;
      objectUri_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The URI prefix of all generated Cloud Storage objects. Example:
     * "gs://bucket_name/object_name_prefix". Each object URI is in format:
     * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
     * contains assets for that type. <shard number> starts from 0. Example:
     * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
     * the first shard of output objects containing all
     * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
     * returned if file with the same name "gs://bucket_name/object_name_prefix"
     * already exists.
     * </pre>
     *
     * <code>string uri_prefix = 2;</code>
     *
     * @return Whether the uriPrefix field is set.
     */
    @java.lang.Override
    public boolean hasUriPrefix() {
      return objectUriCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * The URI prefix of all generated Cloud Storage objects. Example:
     * "gs://bucket_name/object_name_prefix". Each object URI is in format:
     * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
     * contains assets for that type. <shard number> starts from 0. Example:
     * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
     * the first shard of output objects containing all
     * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
     * returned if file with the same name "gs://bucket_name/object_name_prefix"
     * already exists.
     * </pre>
     *
     * <code>string uri_prefix = 2;</code>
     *
     * @return The uriPrefix.
     */
    @java.lang.Override
    public java.lang.String getUriPrefix() {
      java.lang.Object ref = "";
      if (objectUriCase_ == 2) {
        ref = objectUri_;
      }
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (objectUriCase_ == 2) {
          objectUri_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The URI prefix of all generated Cloud Storage objects. Example:
     * "gs://bucket_name/object_name_prefix". Each object URI is in format:
     * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
     * contains assets for that type. <shard number> starts from 0. Example:
     * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
     * the first shard of output objects containing all
     * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
     * returned if file with the same name "gs://bucket_name/object_name_prefix"
     * already exists.
     * </pre>
     *
     * <code>string uri_prefix = 2;</code>
     *
     * @return The bytes for uriPrefix.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getUriPrefixBytes() {
      java.lang.Object ref = "";
      if (objectUriCase_ == 2) {
        ref = objectUri_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (objectUriCase_ == 2) {
          objectUri_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The URI prefix of all generated Cloud Storage objects. Example:
     * "gs://bucket_name/object_name_prefix". Each object URI is in format:
     * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
     * contains assets for that type. <shard number> starts from 0. Example:
     * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
     * the first shard of output objects containing all
     * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
     * returned if file with the same name "gs://bucket_name/object_name_prefix"
     * already exists.
     * </pre>
     *
     * <code>string uri_prefix = 2;</code>
     *
     * @param value The uriPrefix to set.
     * @return This builder for chaining.
     */
    public Builder setUriPrefix(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      objectUriCase_ = 2;
      objectUri_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The URI prefix of all generated Cloud Storage objects. Example:
     * "gs://bucket_name/object_name_prefix". Each object URI is in format:
     * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
     * contains assets for that type. <shard number> starts from 0. Example:
     * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
     * the first shard of output objects containing all
     * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
     * returned if file with the same name "gs://bucket_name/object_name_prefix"
     * already exists.
     * </pre>
     *
     * <code>string uri_prefix = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUriPrefix() {
      if (objectUriCase_ == 2) {
        objectUriCase_ = 0;
        objectUri_ = null;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The URI prefix of all generated Cloud Storage objects. Example:
     * "gs://bucket_name/object_name_prefix". Each object URI is in format:
     * "gs://bucket_name/object_name_prefix/{ASSET_TYPE}/{SHARD_NUMBER} and only
     * contains assets for that type. <shard number> starts from 0. Example:
     * "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is
     * the first shard of output objects containing all
     * compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be
     * returned if file with the same name "gs://bucket_name/object_name_prefix"
     * already exists.
     * </pre>
     *
     * <code>string uri_prefix = 2;</code>
     *
     * @param value The bytes for uriPrefix to set.
     * @return This builder for chaining.
     */
    public Builder setUriPrefixBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      objectUriCase_ = 2;
      objectUri_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.asset.v1p7beta1.GcsDestination)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.asset.v1p7beta1.GcsDestination)
  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.asset.v1p7beta1.GcsDestination DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.asset.v1p7beta1.GcsDestination();
  }
  public static com.google.cloud.asset.v1p7beta1.GcsDestination getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<GcsDestination> PARSER =
      new com.google.protobuf.AbstractParser<GcsDestination>() {
        @java.lang.Override
        public GcsDestination parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message to the exception for callers.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<GcsDestination> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GcsDestination> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.asset.v1p7beta1.GcsDestination getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,902 | java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/CohortsRange.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1alpha/data.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;
/**
*
*
* <pre>
* Configures the extended reporting date range for a cohort report. Specifies
* an offset duration to follow the cohorts over.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.CohortsRange}
*/
public final class CohortsRange extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.CohortsRange)
    CohortsRangeOrBuilder {
  // Fixed at 0: protobuf messages rely on wire-format compatibility, not
  // Java serialization versioning.
  private static final long serialVersionUID = 0L;
  // Use CohortsRange.newBuilder() to construct.
  private CohortsRange(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CohortsRange() {
    granularity_ = 0;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  // Called reflectively by the protobuf runtime to create mutable copies.
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CohortsRange();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_CohortsRange_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_CohortsRange_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1alpha.CohortsRange.class,
            com.google.analytics.data.v1alpha.CohortsRange.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * The granularity used to interpret the `startOffset` and `endOffset` for the
   * extended reporting date range for a cohort report.
   * </pre>
   *
   * Protobuf enum {@code google.analytics.data.v1alpha.CohortsRange.Granularity}
   */
  public enum Granularity implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Should never be specified.
     * </pre>
     *
     * <code>GRANULARITY_UNSPECIFIED = 0;</code>
     */
    GRANULARITY_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Daily granularity. Commonly used if the cohort's `dateRange` is a single
     * day and the request contains `cohortNthDay`.
     * </pre>
     *
     * <code>DAILY = 1;</code>
     */
    DAILY(1),
    /**
     *
     *
     * <pre>
     * Weekly granularity. Commonly used if the cohort's `dateRange` is a week
     * in duration (starting on Sunday and ending on Saturday) and the request
     * contains `cohortNthWeek`.
     * </pre>
     *
     * <code>WEEKLY = 2;</code>
     */
    WEEKLY(2),
    /**
     *
     *
     * <pre>
     * Monthly granularity. Commonly used if the cohort's `dateRange` is a month
     * in duration and the request contains `cohortNthMonth`.
     * </pre>
     *
     * <code>MONTHLY = 3;</code>
     */
    MONTHLY(3),
    // Sentinel for wire values not known to this generated code version;
    // has no valid number or descriptor.
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * Should never be specified.
     * </pre>
     *
     * <code>GRANULARITY_UNSPECIFIED = 0;</code>
     */
    public static final int GRANULARITY_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Daily granularity. Commonly used if the cohort's `dateRange` is a single
     * day and the request contains `cohortNthDay`.
     * </pre>
     *
     * <code>DAILY = 1;</code>
     */
    public static final int DAILY_VALUE = 1;
    /**
     *
     *
     * <pre>
     * Weekly granularity. Commonly used if the cohort's `dateRange` is a week
     * in duration (starting on Sunday and ending on Saturday) and the request
     * contains `cohortNthWeek`.
     * </pre>
     *
     * <code>WEEKLY = 2;</code>
     */
    public static final int WEEKLY_VALUE = 2;
    /**
     *
     *
     * <pre>
     * Monthly granularity. Commonly used if the cohort's `dateRange` is a month
     * in duration and the request contains `cohortNthMonth`.
     * </pre>
     *
     * <code>MONTHLY = 3;</code>
     */
    public static final int MONTHLY_VALUE = 3;
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Granularity valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or
     *     {@code null} if the value is unknown.
     */
    public static Granularity forNumber(int value) {
      switch (value) {
        case 0:
          return GRANULARITY_UNSPECIFIED;
        case 1:
          return DAILY;
        case 2:
          return WEEKLY;
        case 3:
          return MONTHLY;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<Granularity> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<Granularity> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Granularity>() {
          public Granularity findValueByNumber(int number) {
            return Granularity.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.analytics.data.v1alpha.CohortsRange.getDescriptor().getEnumTypes().get(0);
    }
    // Cached copy of values(); values() clones its backing array on each call.
    private static final Granularity[] VALUES = values();
    public static Granularity valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private Granularity(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.analytics.data.v1alpha.CohortsRange.Granularity)
  }
  public static final int GRANULARITY_FIELD_NUMBER = 1;
  // Stored as the raw wire integer so unknown enum values round-trip intact.
  private int granularity_ = 0;
  /**
   *
   *
   * <pre>
   * Required. The granularity used to interpret the `startOffset` and
   * `endOffset` for the extended reporting date range for a cohort report.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.CohortsRange.Granularity granularity = 1;</code>
   *
   * @return The enum numeric value on the wire for granularity.
   */
  @java.lang.Override
  public int getGranularityValue() {
    return granularity_;
  }
  /**
   *
   *
   * <pre>
   * Required. The granularity used to interpret the `startOffset` and
   * `endOffset` for the extended reporting date range for a cohort report.
   * </pre>
   *
   * <code>.google.analytics.data.v1alpha.CohortsRange.Granularity granularity = 1;</code>
   *
   * @return The granularity.
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.CohortsRange.Granularity getGranularity() {
    com.google.analytics.data.v1alpha.CohortsRange.Granularity result =
        com.google.analytics.data.v1alpha.CohortsRange.Granularity.forNumber(granularity_);
    return result == null
        ? com.google.analytics.data.v1alpha.CohortsRange.Granularity.UNRECOGNIZED
        : result;
  }
  public static final int START_OFFSET_FIELD_NUMBER = 2;
  private int startOffset_ = 0;
  /**
   *
   *
   * <pre>
   * `startOffset` specifies the start date of the extended reporting date range
   * for a cohort report. `startOffset` is commonly set to 0 so that reports
   * contain data from the acquisition of the cohort forward.
   *
   * If `granularity` is `DAILY`, the `startDate` of the extended reporting date
   * range is `startDate` of the cohort plus `startOffset` days.
   *
   * If `granularity` is `WEEKLY`, the `startDate` of the extended reporting
   * date range is `startDate` of the cohort plus `startOffset * 7` days.
   *
   * If `granularity` is `MONTHLY`, the `startDate` of the extended reporting
   * date range is `startDate` of the cohort plus `startOffset * 30` days.
   * </pre>
   *
   * <code>int32 start_offset = 2;</code>
   *
   * @return The startOffset.
   */
  @java.lang.Override
  public int getStartOffset() {
    return startOffset_;
  }
  public static final int END_OFFSET_FIELD_NUMBER = 3;
  private int endOffset_ = 0;
  /**
   *
   *
   * <pre>
   * Required. `endOffset` specifies the end date of the extended reporting date
   * range for a cohort report. `endOffset` can be any positive integer but is
   * commonly set to 5 to 10 so that reports contain data on the cohort for the
   * next several granularity time periods.
   *
   * If `granularity` is `DAILY`, the `endDate` of the extended reporting date
   * range is `endDate` of the cohort plus `endOffset` days.
   *
   * If `granularity` is `WEEKLY`, the `endDate` of the extended reporting date
   * range is `endDate` of the cohort plus `endOffset * 7` days.
   *
   * If `granularity` is `MONTHLY`, the `endDate` of the extended reporting date
   * range is `endDate` of the cohort plus `endOffset * 30` days.
   * </pre>
   *
   * <code>int32 end_offset = 3;</code>
   *
   * @return The endOffset.
   */
  @java.lang.Override
  public int getEndOffset() {
    return endOffset_;
  }
  // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // proto3 message: no required fields, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 semantics: default-valued fields are omitted from the wire.
    if (granularity_
        != com.google.analytics.data.v1alpha.CohortsRange.Granularity.GRANULARITY_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, granularity_);
    }
    if (startOffset_ != 0) {
      output.writeInt32(2, startOffset_);
    }
    if (endOffset_ != 0) {
      output.writeInt32(3, endOffset_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Must mirror writeTo exactly so the computed size matches the bytes written.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (granularity_
        != com.google.analytics.data.v1alpha.CohortsRange.Granularity.GRANULARITY_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, granularity_);
    }
    if (startOffset_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, startOffset_);
    }
    if (endOffset_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, endOffset_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1alpha.CohortsRange)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1alpha.CohortsRange other =
        (com.google.analytics.data.v1alpha.CohortsRange) obj;
    if (granularity_ != other.granularity_) return false;
    if (getStartOffset() != other.getStartOffset()) return false;
    if (getEndOffset() != other.getEndOffset()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf-generated hash: fold each set field number and value
    // with fixed multipliers (19/37/53/29).
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + GRANULARITY_FIELD_NUMBER;
    hash = (53 * hash) + granularity_;
    hash = (37 * hash) + START_OFFSET_FIELD_NUMBER;
    hash = (53 * hash) + getStartOffset();
    hash = (37 * hash) + END_OFFSET_FIELD_NUMBER;
    hash = (53 * hash) + getEndOffset();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.analytics.data.v1alpha.CohortsRange parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.analytics.data.v1alpha.CohortsRange prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid the merge cost when converting the default instance to a builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Configures the extended reporting date range for a cohort report. Specifies
   * an offset duration to follow the cohorts over.
   * </pre>
   *
   * Protobuf type {@code google.analytics.data.v1alpha.CohortsRange}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.CohortsRange)
      com.google.analytics.data.v1alpha.CohortsRangeOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_CohortsRange_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_CohortsRange_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.data.v1alpha.CohortsRange.class,
              com.google.analytics.data.v1alpha.CohortsRange.Builder.class);
    }
    // Construct using com.google.analytics.data.v1alpha.CohortsRange.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      granularity_ = 0;
      startOffset_ = 0;
      endOffset_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_CohortsRange_descriptor;
    }
    @java.lang.Override
    public com.google.analytics.data.v1alpha.CohortsRange getDefaultInstanceForType() {
      return com.google.analytics.data.v1alpha.CohortsRange.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.analytics.data.v1alpha.CohortsRange build() {
      com.google.analytics.data.v1alpha.CohortsRange result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.analytics.data.v1alpha.CohortsRange buildPartial() {
      com.google.analytics.data.v1alpha.CohortsRange result =
          new com.google.analytics.data.v1alpha.CohortsRange(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set in bitField0_.
    private void buildPartial0(com.google.analytics.data.v1alpha.CohortsRange result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.granularity_ = granularity_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.startOffset_ = startOffset_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.endOffset_ = endOffset_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.data.v1alpha.CohortsRange) {
        return mergeFrom((com.google.analytics.data.v1alpha.CohortsRange) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.analytics.data.v1alpha.CohortsRange other) {
      if (other == com.google.analytics.data.v1alpha.CohortsRange.getDefaultInstance()) return this;
      // proto3 merge: only non-default scalar values overwrite.
      if (other.granularity_ != 0) {
        setGranularityValue(other.getGranularityValue());
      }
      if (other.getStartOffset() != 0) {
        setStartOffset(other.getStartOffset());
      }
      if (other.getEndOffset() != 0) {
        setEndOffset(other.getEndOffset());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Dispatch on the full tag (field number << 3 | wire type).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                granularity_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                startOffset_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 24:
              {
                endOffset_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits for the three scalar fields (bit 0: granularity,
    // bit 1: start_offset, bit 2: end_offset).
    private int bitField0_;
    private int granularity_ = 0;
    /**
     *
     *
     * <pre>
     * Required. The granularity used to interpret the `startOffset` and
     * `endOffset` for the extended reporting date range for a cohort report.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.CohortsRange.Granularity granularity = 1;</code>
     *
     * @return The enum numeric value on the wire for granularity.
     */
    @java.lang.Override
    public int getGranularityValue() {
      return granularity_;
    }
    /**
     *
     *
     * <pre>
     * Required. The granularity used to interpret the `startOffset` and
     * `endOffset` for the extended reporting date range for a cohort report.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.CohortsRange.Granularity granularity = 1;</code>
     *
     * @param value The enum numeric value on the wire for granularity to set.
     * @return This builder for chaining.
     */
    public Builder setGranularityValue(int value) {
      granularity_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The granularity used to interpret the `startOffset` and
     * `endOffset` for the extended reporting date range for a cohort report.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.CohortsRange.Granularity granularity = 1;</code>
     *
     * @return The granularity.
     */
    @java.lang.Override
    public com.google.analytics.data.v1alpha.CohortsRange.Granularity getGranularity() {
      com.google.analytics.data.v1alpha.CohortsRange.Granularity result =
          com.google.analytics.data.v1alpha.CohortsRange.Granularity.forNumber(granularity_);
      return result == null
          ? com.google.analytics.data.v1alpha.CohortsRange.Granularity.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Required. The granularity used to interpret the `startOffset` and
     * `endOffset` for the extended reporting date range for a cohort report.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.CohortsRange.Granularity granularity = 1;</code>
     *
     * @param value The granularity to set.
     * @return This builder for chaining.
     */
    public Builder setGranularity(
        com.google.analytics.data.v1alpha.CohortsRange.Granularity value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      granularity_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The granularity used to interpret the `startOffset` and
     * `endOffset` for the extended reporting date range for a cohort report.
     * </pre>
     *
     * <code>.google.analytics.data.v1alpha.CohortsRange.Granularity granularity = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearGranularity() {
      bitField0_ = (bitField0_ & ~0x00000001);
      granularity_ = 0;
      onChanged();
      return this;
    }
    private int startOffset_;
    /**
     *
     *
     * <pre>
     * `startOffset` specifies the start date of the extended reporting date range
     * for a cohort report. `startOffset` is commonly set to 0 so that reports
     * contain data from the acquisition of the cohort forward.
     *
     * If `granularity` is `DAILY`, the `startDate` of the extended reporting date
     * range is `startDate` of the cohort plus `startOffset` days.
     *
     * If `granularity` is `WEEKLY`, the `startDate` of the extended reporting
     * date range is `startDate` of the cohort plus `startOffset * 7` days.
     *
     * If `granularity` is `MONTHLY`, the `startDate` of the extended reporting
     * date range is `startDate` of the cohort plus `startOffset * 30` days.
     * </pre>
     *
     * <code>int32 start_offset = 2;</code>
     *
     * @return The startOffset.
     */
    @java.lang.Override
    public int getStartOffset() {
      return startOffset_;
    }
    /**
     *
     *
     * <pre>
     * `startOffset` specifies the start date of the extended reporting date range
     * for a cohort report. `startOffset` is commonly set to 0 so that reports
     * contain data from the acquisition of the cohort forward.
     *
     * If `granularity` is `DAILY`, the `startDate` of the extended reporting date
     * range is `startDate` of the cohort plus `startOffset` days.
     *
     * If `granularity` is `WEEKLY`, the `startDate` of the extended reporting
     * date range is `startDate` of the cohort plus `startOffset * 7` days.
     *
     * If `granularity` is `MONTHLY`, the `startDate` of the extended reporting
     * date range is `startDate` of the cohort plus `startOffset * 30` days.
     * </pre>
     *
     * <code>int32 start_offset = 2;</code>
     *
     * @param value The startOffset to set.
     * @return This builder for chaining.
     */
    public Builder setStartOffset(int value) {
      startOffset_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * `startOffset` specifies the start date of the extended reporting date range
     * for a cohort report. `startOffset` is commonly set to 0 so that reports
     * contain data from the acquisition of the cohort forward.
     *
     * If `granularity` is `DAILY`, the `startDate` of the extended reporting date
     * range is `startDate` of the cohort plus `startOffset` days.
     *
     * If `granularity` is `WEEKLY`, the `startDate` of the extended reporting
     * date range is `startDate` of the cohort plus `startOffset * 7` days.
     *
     * If `granularity` is `MONTHLY`, the `startDate` of the extended reporting
     * date range is `startDate` of the cohort plus `startOffset * 30` days.
     * </pre>
     *
     * <code>int32 start_offset = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearStartOffset() {
      bitField0_ = (bitField0_ & ~0x00000002);
      startOffset_ = 0;
      onChanged();
      return this;
    }
    private int endOffset_;
    /**
     *
     *
     * <pre>
     * Required. `endOffset` specifies the end date of the extended reporting date
     * range for a cohort report. `endOffset` can be any positive integer but is
     * commonly set to 5 to 10 so that reports contain data on the cohort for the
     * next several granularity time periods.
     *
     * If `granularity` is `DAILY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset` days.
     *
     * If `granularity` is `WEEKLY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset * 7` days.
     *
     * If `granularity` is `MONTHLY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset * 30` days.
     * </pre>
     *
     * <code>int32 end_offset = 3;</code>
     *
     * @return The endOffset.
     */
    @java.lang.Override
    public int getEndOffset() {
      return endOffset_;
    }
    /**
     *
     *
     * <pre>
     * Required. `endOffset` specifies the end date of the extended reporting date
     * range for a cohort report. `endOffset` can be any positive integer but is
     * commonly set to 5 to 10 so that reports contain data on the cohort for the
     * next several granularity time periods.
     *
     * If `granularity` is `DAILY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset` days.
     *
     * If `granularity` is `WEEKLY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset * 7` days.
     *
     * If `granularity` is `MONTHLY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset * 30` days.
     * </pre>
     *
     * <code>int32 end_offset = 3;</code>
     *
     * @param value The endOffset to set.
     * @return This builder for chaining.
     */
    public Builder setEndOffset(int value) {
      endOffset_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. `endOffset` specifies the end date of the extended reporting date
     * range for a cohort report. `endOffset` can be any positive integer but is
     * commonly set to 5 to 10 so that reports contain data on the cohort for the
     * next several granularity time periods.
     *
     * If `granularity` is `DAILY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset` days.
     *
     * If `granularity` is `WEEKLY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset * 7` days.
     *
     * If `granularity` is `MONTHLY`, the `endDate` of the extended reporting date
     * range is `endDate` of the cohort plus `endOffset * 30` days.
     * </pre>
     *
     * <code>int32 end_offset = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEndOffset() {
      bitField0_ = (bitField0_ & ~0x00000004);
      endOffset_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.CohortsRange)
  }
  // @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.CohortsRange)
  // Singleton default instance; created eagerly at class-load time.
  private static final com.google.analytics.data.v1alpha.CohortsRange DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.CohortsRange();
  }
  public static com.google.analytics.data.v1alpha.CohortsRange getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton; attaches the partial message to any parse failure.
  private static final com.google.protobuf.Parser<CohortsRange> PARSER =
      new com.google.protobuf.AbstractParser<CohortsRange>() {
        @java.lang.Override
        public CohortsRange parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CohortsRange> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CohortsRange> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.analytics.data.v1alpha.CohortsRange getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/empire-db | 35,995 | empire-db/src/main/java/org/apache/empire/dbms/DBMSHandlerBase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.empire.dbms;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.apache.empire.commons.DateUtils;
import org.apache.empire.commons.ObjectUtils;
import org.apache.empire.commons.StringUtils;
import org.apache.empire.data.DataType;
import org.apache.empire.db.DBBlobData;
import org.apache.empire.db.DBClobData;
import org.apache.empire.db.DBCmdParam;
import org.apache.empire.db.DBColumn;
import org.apache.empire.db.DBColumnExpr;
import org.apache.empire.db.DBCombinedCmd;
import org.apache.empire.db.DBCommand;
import org.apache.empire.db.DBCommandExpr;
import org.apache.empire.db.DBDDLGenerator.DDLActionType;
import org.apache.empire.db.DBDatabase;
import org.apache.empire.db.DBObject;
import org.apache.empire.db.DBRelation;
import org.apache.empire.db.DBRowSet;
import org.apache.empire.db.DBSQLBuilder;
import org.apache.empire.db.DBSQLScript;
import org.apache.empire.db.DBTable;
import org.apache.empire.db.DBTableColumn;
import org.apache.empire.db.exceptions.EmpireSQLException;
import org.apache.empire.db.exceptions.QueryFailedException;
import org.apache.empire.db.validation.DBModelChecker;
import org.apache.empire.db.validation.DBModelParser;
import org.apache.empire.exceptions.InvalidArgumentException;
import org.apache.empire.exceptions.NotSupportedException;
import org.apache.empire.exceptions.UnexpectedReturnValueException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The DBMSHandler class is an abstract base class for all database handler.
* Its purpose is to handle everything that is - or might be - database vendor specific.
*/
public abstract class DBMSHandlerBase implements DBMSHandler
{
private static final Logger log = LoggerFactory.getLogger(DBMSHandler.class);
// Illegal name chars and reserved SQL keywords
protected static final char[] ILLEGAL_NAME_CHARS = new char[] { '@', '?', '>', '=', '<', ';', ':',
'/', '.', '-', ',', '+', '*', ')', '(',
'\'', '&', '%', '!', ' '
};
protected static final String[] GENERAL_SQL_KEYWORDS = new String[] {
"table", "column", "view", "index", "constraint",
"select", "udpate", "insert", "alter", "delete",
"join", "on", "group", "by", "order", "asc", "desc", "all",
"with", "user" };
protected final Set<String> reservedSQLKeywords;
// Postfix for auto-generated Sequence names
protected String SEQUENCE_NAME_SUFFIX = "_SEQ";
    /**
     * DBMSBuilder
     * A default DBSQLBuilder implementation with no additional features.
     * Used by createSQLBuilder() when a dbms does not require a custom builder.
     */
    public static final class DBMSBuilder extends DBSQLBuilder
    {
        protected DBMSBuilder(DBMSHandler dbms)
        {
            super(dbms);
        }
    }
    /**
     * DBMSCommand
     * A default DBCommand implementation with no additional features.
     * Used by createCommand() when a dbms does not require a custom command.
     */
    public static final class DBMSCommand extends DBCommand
    {
        protected DBMSCommand(DBMSHandler dbms, boolean autoPrepareStmt)
        {
            super(dbms, autoPrepareStmt);
        }
    }
/**
* This class is used to emulate sequences by using a sequence table.
* It is used with the executeSQL function and only required for insert statements
*/
public static class DBSeqTable extends DBTable
{
// *Deprecated* private static final long serialVersionUID = 1L;
public DBColumn C_SEQNAME;
public DBColumn C_SEQVALUE;
public DBColumn C_TIMESTAMP;
/**
* Constructor
*
* @param tableName the table name
* @param db the database object
*/
public DBSeqTable(String tableName, DBDatabase db)
{
super(tableName, db);
// Add all Colums
C_SEQNAME = addColumn("SeqName", DataType.VARCHAR, 40, true);
C_SEQVALUE = addColumn("SeqValue", DataType.INTEGER, 0, true);
C_TIMESTAMP = addColumn("SeqTime", DataType.DATETIME, 0, true);
// Primary Key
setPrimaryKey(new DBColumn[] { C_SEQNAME });
}
// Overrideable
public Object getNextValue(String SeqName, long minValue, Connection conn)
{
DBMSHandler dbms = db.getDbms();
// Create a Command
PreparedStatement stmt = null;
try
{ // The select Statement
DBCommand cmd = dbms.createCommand(db.isPreparedStatementsEnabled());
DBCmdParam nameParam = cmd.addParam(SeqName);
cmd.select(C_SEQVALUE);
cmd.select(C_TIMESTAMP);
cmd.where (C_SEQNAME.is(nameParam));
String selectCmd = cmd.getSelect();
// Get the next Value
long seqValue = 0;
while (seqValue == 0)
{
// stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
stmt = conn.prepareStatement(selectCmd, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
stmt.setString(1, SeqName);
// Query existing value
ResultSet rs = stmt.executeQuery();
if (rs.next())
{ // Read the Sequence Value
seqValue = Math.max(rs.getLong(1) + 1, minValue);
java.sql.Timestamp current = rs.getTimestamp(2);
dbms.closeResultSet(rs);
// Update existing Record
cmd.clear();
DBCmdParam name = cmd.addParam(SeqName);
DBCmdParam time = cmd.addParam(current);
cmd.set(C_SEQVALUE.to(seqValue));
cmd.set(C_TIMESTAMP.to(DBDatabase.SYSDATE));
cmd.where(C_SEQNAME.is(name));
cmd.where(C_TIMESTAMP.is(time));
if (dbms.executeSQL(cmd.getUpdate(), cmd.getParamValues(), conn, null) < 1)
seqValue = 0; // Try again
}
else
{ // Close Reader
dbms.closeResultSet(rs);
// sequence does not exist
seqValue = minValue;
log.warn("Sequence {} does not exist! Creating sequence with start-value of {}", SeqName, seqValue);
// create a new sequence entry
cmd.clear();
cmd.set(C_SEQNAME.to(SeqName));
cmd.set(C_SEQVALUE.to(seqValue));
cmd.set(C_TIMESTAMP.to(DBDatabase.SYSDATE));
if (dbms.executeSQL(cmd.getInsert(), cmd.getParamValues(), conn, null) < 1)
seqValue = 0; // Try again
}
// check for concurrency problem
if (seqValue == 0)
log.warn("Failed to increment sequence {}. Trying again!", SeqName);
// close
closeStatement(stmt);
cmd.clear();
rs = null;
}
if (log.isInfoEnabled())
log.info("Sequence {} incremented to {}.", SeqName, seqValue);
return new Long(seqValue);
} catch (SQLException e) {
// throw exception
throw new EmpireSQLException(this, e);
} finally
{ // Cleanup
closeStatement(stmt);
}
}
/*
* cleanup
*/
private void closeStatement(Statement stmt)
{
try
{ // Statement close
if (stmt != null)
stmt.close();
// done
return;
} catch (SQLException sqle) {
// Commit failed!
throw new EmpireSQLException(this, sqle);
}
}
}
    /**
     * Constructor
     * Initializes the set of reserved SQL keywords from the general list
     * plus any dbms specific keywords supplied by the subclass.
     * @param specificSqlKeywords additional dbms specific keywords (may be empty)
     */
    protected DBMSHandlerBase(String[] specificSqlKeywords)
    {
        // Initialize List of reserved Keywords
        int capacity = GENERAL_SQL_KEYWORDS.length + specificSqlKeywords.length;
        reservedSQLKeywords = new HashSet<String>(capacity);
        for (String keyWord : GENERAL_SQL_KEYWORDS) {
            reservedSQLKeywords.add(keyWord);
        }
        // Add the dbms specific keywords (lower-cased by addSQLKeyword)
        for (String keyWord : specificSqlKeywords) {
            addSQLKeyword(keyWord);
        }
    }
    /**
     * Constructor
     * Initializes the keyword set with the general SQL keywords only.
     */
    protected DBMSHandlerBase()
    {
        this(new String[0]);
    }
    /**
     * Adds an additional SQL Keyword to the keyword list.
     * Keywords are stored lower-case, matching the lookup in detectQuoteName().
     * @param keyWord the keyword to reserve (case-insensitive)
     */
    protected void addSQLKeyword(String keyWord)
    {
        reservedSQLKeywords.add(keyWord.toLowerCase());
    }
    /**
     * checks if the database exists
     * The default implementation performs a simple count query on the first table or view
     * SELECT count(*) FROM table
     * NOTE(review): assumes the database declares at least one table or view;
     * if both lists are empty, getViews().get(0) throws IndexOutOfBoundsException
     * -- confirm callers guarantee a non-empty model.
     * @param db the database to check
     * @param conn a valid database connection
     * @return true if the database exists or false otherwise
     */
    @Override
    public boolean checkExists(DBDatabase db, Connection conn)
    {
        // Default implementation:
        // Select the count from ANY table or view
        List<DBTable> tables = db.getTables();
        DBRowSet any = (tables.isEmpty() ? db.getViews().get(0) : tables.get(0));
        String schema = db.getSchema();
        String linkName = db.getLinkName();
        // build the statement
        DBSQLBuilder sql = createSQLBuilder();
        sql.append("SELECT count(*) from ");
        if (schema != null)
        { // Add Schema
            sql.append(schema);
            sql.append(".");
        }
        // Append the name
        appendObjectName(sql, any.getName(), null);
        if (linkName!=null)
        { // Database Link
            sql.append(getSQLPhrase(DBSqlPhrase.SQL_DATABASE_LINK));
            sql.append(linkName);
        }
        // Select now; any failure is interpreted as "database does not exist"
        try {
            querySingleValue(sql.toString(), null, DataType.INTEGER, conn);
            return true;
        } catch(QueryFailedException e) {
            // Database does not exist
            return false;
        }
    }
    /**
     * Called when a database is opened.
     * The default implementation does nothing; subclasses may override
     * to perform dbms specific initialization.
     */
    @Override
    public void attachDatabase(DBDatabase db, Connection conn)
    {
        /* Nothing here */
    }
    /**
     * Called when a database is closed.
     * The default implementation does nothing; subclasses may override
     * to perform dbms specific cleanup.
     */
    @Override
    public void detachDatabase(DBDatabase db, Connection conn)
    {
        /* Nothing here */
    }
    /**
     * This function creates a DBSQLBuilder for this DBMS.
     * The default returns the featureless DBMSBuilder; override for dbms specific SQL.
     * @return a DBMS specific DBSQLBuilder object
     */
    @Override
    public DBSQLBuilder createSQLBuilder()
    {
        return new DBMSBuilder(this);
    }
    /**
     * This function creates a DBCommand derived object for this database.
     * The default returns the featureless DBMSCommand; override for dbms specific commands.
     * @param autoPrepareStmt flag whether to automatically provide literal values as prepared statement params
     * @return a DBCommand object
     */
    @Override
    public DBCommand createCommand(boolean autoPrepareStmt)
    {
        return new DBMSCommand(this, autoPrepareStmt);
    }
    /**
     * This function gives the dbms a chance to provide a custom implementation
     * for a combined command such as UNION or INTERSECT
     * @param left the left command
     * @param keyWord the key word (either "UNION" or "INTERSECT")
     * @param right the right command
     * @return a DBCommandExpr object
     */
    @Override
    public DBCommandExpr createCombinedCommand(DBCommandExpr left, String keyWord, DBCommandExpr right)
    {
        return new DBCombinedCmd(left, keyWord, right);
    }
    /**
     * Returns whether or not a particular feature is supported by this dbms.
     * @param type type of requested feature (see {@link DBMSFeature})
     * @return true if the feature is supported or false otherwise
     */
    @Override
    public abstract boolean isSupported(DBMSFeature type);
    /**
     * Detects whether a table or column name needs to be quoted or not<br>
     * By default all reserved SQL keywords as well as names
     * containing a "-", "/", "+" or " " require quoting.<br>
     * Override this function to add database specific keywords like "user" or "count"
     * @param object the object the name belongs to (unused by this default implementation)
     * @param name the name which to check
     * @return true if the name needs to be quoted or false otherwise
     */
    @Override
    public boolean detectQuoteName(DBObject object, String name)
    {
        // Check for reserved names (keyword set holds lower-case entries)
        if (reservedSQLKeywords.contains(name.toLowerCase()))
            return true;
        // Check for illegalNameChars
        // NOTE: ILLEGAL_NAME_CHARS is ordered by descending char code, so the
        // inner loop may stop early once c > ic (no later entry can match)
        int len = name.length();
        for (int i=0; i<len; i++)
        { char c = name.charAt(i);
            for (int j=0; j<ILLEGAL_NAME_CHARS.length; j++)
            { char ic = ILLEGAL_NAME_CHARS[j];
                if (c>ic)
                    break;
                if (c==ic)
                    return true;
            }
        }
        // Quoting not necessary
        return false;
    }
/**
* Appends a table, view or column name to an SQL phrase.
*
* @param sql the StringBuilder containing the SQL phrase.
* @param name the name of the object (table, view or column)
* @param useQuotes use quotes or not
*/
@Override
public void appendObjectName(DBSQLBuilder sql, String name, Boolean useQuotes)
{
if (useQuotes==null)
useQuotes = detectQuoteName(null, name);
// Check whether to use quotes or not
if (useQuotes)
sql.append(getSQLPhrase(DBSqlPhrase.SQL_QUOTES_OPEN));
// Append Name
sql.append(name);
// End Quotes
if (useQuotes)
sql.append(getSQLPhrase(DBSqlPhrase.SQL_QUOTES_CLOSE));
}
/**
* Returns a timestamp that is used for record updates.
* @param conn the connection that might be used
* @return the current date and time.
*/
@Override
public Timestamp getUpdateTimestamp(Connection conn)
{
// Default implementation
java.util.Date date = new java.util.Date();
return new java.sql.Timestamp(date.getTime());
}
    /**
     * Returns the next value of a named sequence The numbers are used for fields of type DBExpr.DT_AUTOINC.<BR>
     * If a dbms supports this function it must return true for isSupported(DBMSFeature.SEQUENCES).
     *
     * @param db the database
     * @param SeqName the name of the sequence
     * @param minValue the minimum value of the sequence
     * @param conn a valid database connection
     * @return a new unique sequence value or null if an error occurred
     */
    public abstract Object getNextSequenceValue(DBDatabase db, String SeqName, int minValue, Connection conn);
    /**
     * Returns an expression for creating a sequence value.
     * This is intended for the use with INSERT INTO statements where many records are affected.
     * @param column the column for which to obtain an expression providing the next sequence value
     * @return an expression for the next sequence value
     */
    public abstract DBColumnExpr getNextSequenceValueExpr(DBTableColumn column);
/**
* Returns the sequence name of for a column of type AUTOINC
* The sequence name is usually provided as the default value
* If no Default value is provided the sequence name is generated from the table and the column name
* @param column the column for which to get a sequence
* @return the sequence name
*/
public String getColumnSequenceName(DBTableColumn column)
{
if (column.getDataType()!=DataType.AUTOINC)
throw new InvalidArgumentException("column", column);
// return the sequence name
Object seqName = column.getDefaultValue();
if (seqName!=null)
return seqName.toString();
// Auto-generate the sequence name
StringBuilder b = new StringBuilder(column.getRowSet().getName());
b.append("_");
b.append(column.getName());
b.append(SEQUENCE_NAME_SUFFIX);
seqName = b.toString();
// Store as default for later use
column.setDefaultValue(seqName);
return (String)seqName;
}
/**
* Returns an auto-generated value for a particular column
*
* @param db the database
* @param column the column for which a value is required
* @param conn a valid database connection
* @return the auto-generated value
*/
@Override
public Object getColumnAutoValue(DBDatabase db, DBTableColumn column, Connection conn)
{
// Supports sequences?
DataType type = column.getDataType();
if (type == DataType.AUTOINC)
{ // Use a numeric sequence
if (isSupported(DBMSFeature.SEQUENCES)==false)
return null; // Create Later
String sequenceName = getColumnSequenceName(column);
return getNextSequenceValue(db, sequenceName, 1, conn);
}
else if (type== DataType.UNIQUEID)
{ // emulate using java.util.UUID
return UUID.randomUUID();
}
else if (type==DataType.DATE || type==DataType.TIME || type==DataType.DATETIME || type==DataType.TIMESTAMP)
{ if (conn==null)
return null; // No connection
// Get database system's date and time
Date ts = getUpdateTimestamp(conn);
if (type==DataType.DATE)
return DateUtils.getDateOnly(ts);
if (type==DataType.TIME)
return DateUtils.getTimeOnly(ts);
return ts;
}
// Other types
throw new NotSupportedException(this, "getColumnAutoValue for "+column.getFullName());
}
/**
* Returns an expression that ignores the case of a column expression
* Only for text columns.
* Default is upper(expr)
* @param expr the expression for which to ignore the case
* @return the ignore case expression or the expression itself
*/
@Override
public DBColumnExpr getIgnoreCaseExpr(DBColumnExpr expr)
{
if (expr==null || !expr.getDataType().isText())
{ // not a CHAR or VARCHAR
return expr;
}
return expr.upper();
}
/**
* Reads a single column value from the given JDBC ResultSet and returns a value object of desired data type.<BR>
*
* This gives the dbms the opportunity to change the value
* i.e. to simulate missing data types with other types.
*
* @param rset the sql Resultset with the current data row
* @param columnIndex one based column Index of the desired column
* @param dataType the required data type
*
* @return the value of the Column
*
* @throws SQLException if a database access error occurs
*/
@Override
public Object getResultValue(ResultSet rset, int columnIndex, DataType dataType)
throws SQLException
{
// Special handing of DATE, TIME, DATETIME and TIMESTAMP
if (dataType == DataType.DATE)
{ // use getDate() (do not use getObject()!)
return rset.getDate(columnIndex);
}
if (dataType == DataType.TIME)
{ // use getTime() (do not use getObject()!)
return rset.getTime(columnIndex);
}
if (dataType == DataType.DATETIME || dataType == DataType.TIMESTAMP)
{ // use getTimestamp() (do not use getObject()!)
return rset.getTimestamp(columnIndex);
}
// Check for character large object
if (dataType == DataType.CLOB)
{ // Get string from character large object
java.sql.Clob clob = rset.getClob(columnIndex);
return ((clob != null) ? clob.getSubString(1, (int) clob.length()) : null);
}
// Check for binary large object
if (dataType == DataType.BLOB)
{ // Get bytes of a binary large object
java.sql.Blob blob = rset.getBlob(columnIndex);
return ((blob != null) ? blob.getBytes(1, (int) blob.length()) : null);
}
// default
return rset.getObject(columnIndex);
}
    /**
     * Executes the select, update or delete SQL-Command with a Statement object.
     *
     * @param sqlCmd the SQL-Command
     * @param sqlParams array of sql command parameters used for prepared statements (Optional).
     * @param conn a valid connection to the database.
     * @param genKeys allows to set the auto generated key of a record (INSERT statements only)
     *
     * @return the row count for insert, update or delete or 0 for SQL statements that return nothing
     *
     * @throws SQLException if a database access error occurs
     */
    @Override
    public int executeSQL(String sqlCmd, Object[] sqlParams, Connection conn, DBSetGenKeys genKeys)
        throws SQLException
    { // Execute the Statement
        Statement stmt = null;
        try
        {
            int count = 0;
            if (sqlParams!=null)
            { // Use a prepared statement
                // RETURN_GENERATED_KEYS is only requested when the caller wants them
                PreparedStatement pstmt = (genKeys!=null)
                    ? conn.prepareStatement(sqlCmd, Statement.RETURN_GENERATED_KEYS)
                    : conn.prepareStatement(sqlCmd);
                stmt = pstmt;
                prepareStatement(pstmt, sqlParams);
                count = pstmt.executeUpdate();
            }
            else
            { // Execute a simple statement
                stmt = conn.createStatement();
                count = (genKeys!=null)
                    ? stmt.executeUpdate(sqlCmd, Statement.RETURN_GENERATED_KEYS)
                    : stmt.executeUpdate(sqlCmd);
            }
            // Retrieve any auto-generated keys
            if (genKeys!=null && count>0)
            { // Return Keys
                ResultSet rs = stmt.getGeneratedKeys();
                try {
                    int rownum = 0;
                    while(rs.next())
                    {
                        genKeys.set(rownum++, rs.getObject(1));
                    }
                } finally {
                    rs.close();
                }
            }
            // done
            return count;
        } finally {
            // always close the statement, even on error
            closeStatement(stmt);
        }
    }
    /**
     * Executes a list of sql statements as batch.
     * When parameters are supplied, runs of consecutive identical statements are
     * grouped into a single PreparedStatement batch for efficiency.
     * @param sqlCmd an array of sql statements
     * @param sqlCmdParams and array of statement parameters (one entry per statement, entries may be null)
     * @param conn a JDBC connection
     * @return an array containing the number of records affected by each statement
     * @throws SQLException thrown if a database access error occurs
     */
    @Override
    public int[] executeBatch(String[] sqlCmd, Object[][] sqlCmdParams, Connection conn)
        throws SQLException
    { // Execute the Statement
        if (sqlCmdParams!=null)
        { // Use a prepared statement
            PreparedStatement pstmt = null;
            try
            {
                int pos=0;
                String lastCmd = null;
                int[] result = new int[sqlCmd.length];
                // NOTE: i deliberately runs one past the end (<=) so the final
                // batch is flushed by the cmd==null iteration below
                for (int i=0; i<=sqlCmd.length; i++)
                { // get cmd
                    String cmd = (i<sqlCmd.length ? sqlCmd[i] : null);
                    if (StringUtils.compareEqual(cmd, lastCmd, true)==false)
                    { // statement text changed: flush the batch collected so far
                        if (pstmt!=null)
                        { // execute and close
                            log.debug("Executing batch containing {} statements", i-pos);
                            int[] res = pstmt.executeBatch();
                            for (int j=0; j<res.length; j++)
                                result[pos+j]=res[j];
                            pos+=res.length;
                            closeStatement(pstmt);
                            pstmt = null;
                        }
                        // has next?
                        if (cmd==null)
                            break;
                        // new statement
                        if (log.isTraceEnabled())
                            log.trace("Creating prepared statement for batch: {}", cmd);
                        pstmt = conn.prepareStatement(cmd);
                        lastCmd = cmd;
                    }
                    // add batch
                    if (sqlCmdParams[i]!=null)
                    {
                        prepareStatement(pstmt, sqlCmdParams[i]);
                    }
                    if (log.isTraceEnabled())
                        log.trace("Adding batch with {} params.", (sqlCmdParams[i]!=null ? sqlCmdParams[i].length : 0));
                    pstmt.addBatch();
                }
                return result;
            } finally {
                closeStatement(pstmt);
            }
        }
        else
        { // Execute a simple statement (no parameters: one plain JDBC batch)
            Statement stmt = conn.createStatement();
            try {
                for (int i=0; i<sqlCmd.length; i++)
                {
                    String cmd = sqlCmd[i];
                    if (log.isTraceEnabled())
                        log.trace("Adding statement to batch: {}", cmd);
                    stmt.addBatch(cmd);
                }
                log.debug("Executing batch containing {} statements", sqlCmd.length);
                int result[] = stmt.executeBatch();
                return result;
            } finally {
                closeStatement(stmt);
            }
        }
    }
    /**
     * Executes a select SQL-command and returns the query results.
     * The caller is responsible for closing the returned ResultSet
     * (e.g. via closeResultSet(), which also closes the Statement).
     *
     * @param sqlCmd the SQL-Command
     * @param sqlParams array of sql command parameters used for prepared statements (Optional).
     * @param scrollable true if scrollable or false otherwise
     * @param conn a valid connection to the database.
     * @return the JDBC resultset
     * @throws SQLException thrown if a database access error occurs
     */
    @Override
    public ResultSet executeQuery(String sqlCmd, Object[] sqlParams, boolean scrollable, Connection conn)
        throws SQLException
    {
        Statement stmt = null;
        try
        { // Set scroll type
            int type = (scrollable ? ResultSet.TYPE_SCROLL_INSENSITIVE
                                   : ResultSet.TYPE_FORWARD_ONLY);
            // Create and execute a query statement
            if (sqlParams!=null)
            { // Use prepared statement
                PreparedStatement pstmt = conn.prepareStatement(sqlCmd, type, ResultSet.CONCUR_READ_ONLY);
                stmt = pstmt;
                prepareStatement(pstmt, sqlParams);
                return pstmt.executeQuery();
            } else
            { // Use simple statement
                stmt = conn.createStatement(type, ResultSet.CONCUR_READ_ONLY);
                return stmt.executeQuery(sqlCmd);
            }
        } catch(SQLException e) {
            // on failure, close the statement (if not null) before rethrowing
            if (log.isDebugEnabled())
                log.debug("Error executeQuery '"+sqlCmd+"' --> "+e.getMessage(), e);
            closeStatement(stmt);
            throw e;
        }
    }
/**
* Query a single value
* @return the value of the first column in the first row of the query
*/
@Override
public Object querySingleValue(String sqlCmd, Object[] sqlParams, DataType dataType, Connection conn)
{
ResultSet rs = null;
try
{ // Get the next Value
rs = executeQuery(sqlCmd, sqlParams, false, conn);
if (rs == null)
throw new UnexpectedReturnValueException(rs, "dbms.executeQuery()");
// Check Result
if (rs.next() == false)
{ // no result
log.trace("querySingleValue for {} returned no result", sqlCmd);
return ObjectUtils.NO_VALUE;
}
// Read value
return getResultValue(rs, 1, dataType);
} catch (SQLException sqle)
{ // Error
throw new QueryFailedException(this, sqlCmd, sqle);
} finally {
// Cleanup
closeResultSet(rs);
}
}
/**
* Appends a statement to enable or disable a foreign key relation.<br>
* The default is to drop or create the relation
* Override this method to provide different behavior for your database.
* @param r the foreign key relation which should be enabled or disabled
* @param enable true to enable the relation or false to disable
* @param script the script to which to add the DDL command(s)
*/
@Override
public void appendEnableRelationStmt(DBRelation r, boolean enable, DBSQLScript script)
{
if (enable)
getDDLScript(DDLActionType.CREATE, r, script);
else
getDDLScript(DDLActionType.DROP, r, script);
}
    /**
     * Creates a DataModelParser instance of this DBMSHandler.
     * @param catalog the catalog to parse (may be null)
     * @param schema the schema to parse (may be null)
     * @return the model parser
     */
    @Override
    public DBModelParser createModelParser(String catalog, String schema)
    {
        return new DBModelParser(catalog, schema);
    }
/**
* Creates a DataModelChecker instance of this DBMSHandler
* @return the model checker
*/
@Override
public DBModelChecker createModelChecker(DBDatabase db)
{
log.warn("A general and possibly untested DBModelChecker is used for DBMSHandler {}. Please override to inklude DBMS specific features.", getClass().getSimpleName());
// the default model checker
DBModelParser modelParser = createModelParser(null, db.getSchema());
return new DBModelChecker(modelParser);
}
    /**
     * Extracts the native error message of an SQLException.
     * The default simply returns getMessage(); dbms implementations may
     * override this to strip vendor specific prefixes.
     * @param e the SQLException
     * @return the error message of the database
     */
    @Override
    public String extractErrorMessage(SQLException e)
    {
        return e.getMessage();
    }
    /**
     * Convenience function for closing a JDBC Resultset<BR>
     * Use it instead of rset.close() and stmt.close()<BR>
     * Also closes the Statement that produced the ResultSet (if any).
     * <P>
     * @param rset a ResultSet object
     */
    @Override
    public void closeResultSet(ResultSet rset)
    {
        try
        { // check ResultSet
            if (rset == null)
                return; // nothing to do
            // close Resultset
            Statement stmt = rset.getStatement();
            rset.close();
            // check Statement
            if (stmt == null)
                return;
            // close Statement
            stmt.close();
            // done
            return;
        } catch (SQLException sqle) {
            // closing the ResultSet or Statement failed
            throw new EmpireSQLException(this, sqle);
        }
    }
    /**
     * Convenience function for closing a JDBC Statement<BR>
     * Use it instead of stmt.close()<BR>
     * <P>
     * @param stmt a Statement object (null is ignored)
     */
    protected void closeStatement(Statement stmt)
    {
        try
        { // Statement close
            if (stmt != null)
                stmt.close();
            // done
            return;
        } catch (SQLException sqle) {
            // closing the statement failed
            throw new EmpireSQLException(this, sqle);
        }
    }
/**
* Prepares an sql statement by setting the supplied objects as parameters.
*
* @param pstmt the prepared statement
* @param sqlParams list of objects
* @throws SQLException thrown if a database access error occurs
*/
protected void prepareStatement(PreparedStatement pstmt, Object[] sqlParams)
throws SQLException
{
for (int i=0; i<sqlParams.length; i++)
{
Object value = sqlParams[i];
try {
addStatementParam(pstmt, i+1, value); // , conn
} catch(SQLException e) {
log.error("SQLException: Unable to set prepared statement parameter {} to '{}'", i+1, StringUtils.toString(value));
throw e;
}
}
}
    /**
     * Adds a statement parameter to a prepared statement.
     * BLOB and CLOB values are bound as streams; java.util.Date values (but not
     * Timestamps) are converted to Timestamp; Character and Enum values are
     * bound as their String representation.
     *
     * @param pstmt the prepared statement
     * @param paramIndex the parameter index (1-based, as required by JDBC)
     * @param value the parameter value
     * @throws SQLException thrown if a database access error occurs
     */
    protected void addStatementParam(PreparedStatement pstmt, int paramIndex, Object value)
        throws SQLException
    {
        if (value instanceof DBBlobData)
        {
            // handling for blobs
            DBBlobData blobData = (DBBlobData)value;
            pstmt.setBinaryStream(paramIndex, blobData.getInputStream(), blobData.getLength());
            // log
            if (log.isTraceEnabled())
                log.trace("Statement param {} set to BLOB data", paramIndex);
        }
        else if(value instanceof DBClobData)
        {
            // handling for clobs
            DBClobData clobData = (DBClobData)value;
            pstmt.setCharacterStream(paramIndex, clobData.getReader(), clobData.getLength());
            // log
            if (log.isTraceEnabled())
                log.trace("Statement param {} set to CLOB data", paramIndex);
        }
        else if(value instanceof Date && !(value instanceof Timestamp))
        {
            // handling for dates: convert to Timestamp
            Timestamp ts = new Timestamp(((Date)value).getTime());
            pstmt.setObject(paramIndex, ts);
            // log
            if (log.isTraceEnabled())
                log.trace("Statement param {} set to date '{}'", paramIndex, ts);
        }
        else if((value instanceof Character)
             || (value instanceof Enum<?>))
        {
            // Objects that need String conversion
            String strval = value.toString();
            pstmt.setObject(paramIndex, strval);
            // log
            if (log.isTraceEnabled())
                log.trace("Statement param {} set to '{}'", paramIndex, strval);
        }
        else
        { // simple parameter value
            pstmt.setObject(paramIndex, value);
            // log
            if (log.isTraceEnabled())
                log.trace("Statement param {} set to '{}'", paramIndex, value);
        }
    }
} |
apache/poi | 36,125 | poi/src/main/java/org/apache/poi/poifs/macros/VBAMacroReader.java | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.poifs.macros;
import static org.apache.logging.log4j.util.Unbox.box;
import static org.apache.poi.util.StringUtil.endsWithIgnoreCase;
import static org.apache.poi.util.StringUtil.startsWithIgnoreCase;
import java.io.Closeable;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
import org.apache.logging.log4j.Logger;
import org.apache.poi.logging.PoiLogManager;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.FileMagic;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.macros.Module.ModuleType;
import org.apache.poi.util.CodePageUtil;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.RLEDecompressingInputStream;
import org.apache.poi.util.StringUtil;
/**
* <p>Finds all VBA Macros in an office file (OLE2/POIFS and OOXML/OPC),
* and returns them.
* </p>
* <p>
* <b>NOTE:</b> This does not read macros from .ppt files.
* See org.apache.poi.hslf.usermodel.TestBugs.getMacrosFromHSLF() in the scratchpad
* module for an example of how to do this. Patches that make macro
* extraction from .ppt more elegant are welcomed!
* </p>
*
* @since 3.15-beta2
*/
@SuppressWarnings("unused")
public class VBAMacroReader implements Closeable {
    private static final Logger LOGGER = PoiLogManager.getLogger(VBAMacroReader.class);

    //arbitrary limit on size of strings to read, etc.
    private static final int MAX_STRING_LENGTH = 20000;

    // name of the VBA project part inside an OOXML (zip) package
    protected static final String VBA_PROJECT_OOXML = "vbaProject.bin";
    // name of the VBA project directory inside an OLE2/POIFS file
    protected static final String VBA_PROJECT_POIFS = "VBA";

    // the filesystem holding the VBA project; nulled by close()
    private POIFSFileSystem fs;
    /**
     * Opens a reader over the given stream, detecting whether it is an
     * OLE2 (POIFS) file or an OOXML (zip) package by its file magic.
     * @param rstream the stream to read (OLE2 or OOXML)
     */
    public VBAMacroReader(InputStream rstream) throws IOException {
        InputStream is = FileMagic.prepareToCheckMagic(rstream);
        FileMagic fm = FileMagic.valueOf(is);
        if (fm == FileMagic.OLE2) {
            fs = new POIFSFileSystem(is);
        } else {
            openOOXML(is);
        }
    }
    /**
     * Opens a reader over the given file. Tries OLE2/POIFS first and falls
     * back to OOXML handling if the file turns out to be a zip package.
     * @param file the document file (OLE2 or OOXML)
     */
    public VBAMacroReader(File file) throws IOException {
        try {
            this.fs = new POIFSFileSystem(file);
        } catch (OfficeXmlFileException e) {
            // not OLE2 - treat it as an OOXML (zip) package instead
            openOOXML(Files.newInputStream(file.toPath()));
        }
    }
    /**
     * Opens a reader over an already-open POIFSFileSystem.
     * Note: close() will close the supplied filesystem.
     * @param fs the filesystem containing the VBA project
     */
    public VBAMacroReader(POIFSFileSystem fs) {
        this.fs = fs;
    }
private void openOOXML(InputStream zipFile) throws IOException {
try(ZipInputStream zis = new ZipInputStream(zipFile)) {
ZipEntry zipEntry;
while ((zipEntry = zis.getNextEntry()) != null) {
if (endsWithIgnoreCase(zipEntry.getName(), VBA_PROJECT_OOXML)) {
try {
// Make a POIFSFileSystem from the contents, and close the stream
this.fs = new POIFSFileSystem(zis);
return;
} catch (IOException e) {
// Tidy up
zis.close();
// Pass on
throw e;
}
}
}
}
throw new IllegalArgumentException("No VBA project found");
}
    /**
     * Closes the underlying POIFSFileSystem.
     * The reader must not be used afterwards (fs is nulled to fail fast).
     */
    @Override
    public void close() throws IOException {
        fs.close();
        fs = null;
    }
    /**
     * Reads all macro modules from the opened office file.
     * Performs three passes over the filesystem which all mutate the shared
     * module map and therefore must run in this order: collect modules,
     * then resolve module names, then project properties (the last pass
     * presumably establishes the charset - defined elsewhere in this class).
     * @return map of module name to Module
     */
    public Map<String, Module> readMacroModules() throws IOException {
        final ModuleMap modules = new ModuleMap();
        //ascii -> unicode mapping for module names
        //preserve insertion order
        final Map<String, String> moduleNameMap = new LinkedHashMap<>();

        findMacros(fs.getRoot(), modules);
        findModuleNameMap(fs.getRoot(), moduleNameMap, modules);
        findProjectProperties(fs.getRoot(), moduleNameMap, modules);

        Map<String, Module> moduleSources = new HashMap<>();
        for (Map.Entry<String, ModuleImpl> entry : modules.entrySet()) {
            ModuleImpl module = entry.getValue();
            // propagate the charset detected for the project to each module
            module.charset = modules.charset;
            moduleSources.put(entry.getKey(), module);
        }
        return moduleSources;
    }
/**
* Reads all macros from all modules of the opened office file.
* @return All the macros and their contents
*
* @since 3.15-beta2
*/
public Map<String, String> readMacros() throws IOException {
Map<String, Module> modules = readMacroModules();
Map<String, String> moduleSources = new HashMap<>();
for (Map.Entry<String, Module> entry : modules.entrySet()) {
moduleSources.put(entry.getKey(), entry.getValue().getContent());
}
return moduleSources;
}
    protected static class ModuleImpl implements Module {
        // offset of the compressed source within buf, read from the dir stream;
        // null until the module has been seen there
        Integer offset;
        // module bytes: initially the raw stream content, replaced by the
        // decompressed VBA source once the offset is known
        byte[] buf;
        ModuleType moduleType;
        // charset used to decode buf; assigned from ModuleMap.charset
        Charset charset;
        void read(InputStream in) throws IOException {
            buf = IOUtils.toByteArray(in);
        }
        @Override
        public String getContent() {
            return new String(buf, charset);
        }
        @Override
        public ModuleType geModuleType() {
            // NOTE: the method name "geModuleType" (sic) is defined by the
            // Module interface and cannot be corrected here
            return moduleType;
        }
    }
    // Map of module name to module, plus the charset shared by all modules
    protected static class ModuleMap extends HashMap<String, ModuleImpl> {
        Charset charset = StringUtil.WIN_1252; // default charset
    }
/**
* Recursively traverses directory structure rooted at {@code dir}.
* For each macro module that is found, the module's name and code are
* added to {@code modules}.
*
* @param dir The directory of entries to look at
* @param modules The resulting map of modules
* @throws IOException If reading the VBA module fails
* @since 3.15-beta2
*/
protected void findMacros(DirectoryNode dir, ModuleMap modules) throws IOException {
if (VBA_PROJECT_POIFS.equalsIgnoreCase(dir.getName())) {
// VBA project directory, process
readMacros(dir, modules);
} else {
// Check children
for (Entry child : dir) {
if (child instanceof DirectoryNode) {
findMacros((DirectoryNode)child, modules);
}
}
}
}
    /**
     * Reads one module's metadata from the "dir" stream. The first time a
     * stream name is seen, only the module's offset is recorded; on the second
     * pass (once the raw document-stream bytes are in {@code module.buf}) the
     * module is decompressed.
     *
     * Side-effects: adds a new module to the module map, or sets the buf field
     * on the module to the decompressed stream contents (the VBA code for one
     * module).
     *
     * @param in the run-length encoded input stream to read from, positioned
     *           at a MODULE_OFFSET record's payload
     * @param streamName the stream name of the module
     * @param modules a map to store the modules
     * @throws IOException If reading data from the stream or from modules fails
     */
    private static void readModuleMetadataFromDirStream(RLEDecompressingInputStream in, String streamName, ModuleMap modules) throws IOException {
        int moduleOffset = in.readInt();
        ModuleImpl module = modules.get(streamName);
        if (module == null) {
            // First time we've seen the module. Record its offset so the later
            // document-stream pass can decompress it; do NOT read bytes here —
            // the dir stream only carries metadata at this point.
            module = new ModuleImpl();
            module.offset = moduleOffset;
            modules.put(streamName, module);
        } else {
            // Second pass: module.buf holds the raw document-stream bytes.
            // Decompress starting at the recorded offset and store the result
            // (the module's VBA source bytes) back into module.buf.
            InputStream stream = new RLEDecompressingInputStream(
                    UnsynchronizedByteArrayInputStream.builder().
                    setByteArray(module.buf).
                    setOffset(moduleOffset).
                    setLength(module.buf.length - moduleOffset).
                    get()
            );
            module.read(stream);
            stream.close();
        }
    }
    /**
     * Reads a module's bytes from its own document stream. If the "dir" stream
     * has not supplied an offset yet, the compressed bytes are stored for
     * later; otherwise they are decompressed immediately. If the recorded
     * offset turns out to be wrong, a brute-force scan for the compressed
     * container is attempted as a last resort.
     *
     * @param documentNode the module's document stream
     * @param name the module stream name (key into {@code modules})
     * @param modules map of already-known modules
     * @throws IOException if the stream cannot be read, or the module offset
     *         was never recorded (bug 59858)
     */
    private static void readModuleFromDocumentStream(DocumentNode documentNode, String name, ModuleMap modules) throws IOException {
        ModuleImpl module = modules.get(name);
        // TODO Refactor this to fetch dir then do the rest
        if (module == null) {
            // no DIR stream with offsets yet, so store the compressed bytes for later
            module = new ModuleImpl();
            modules.put(name, module);
            try (InputStream dis = new DocumentInputStream(documentNode)) {
                module.read(dis);
            }
        } else if (module.buf == null) { //if we haven't already read the bytes for the module keyed off this name...
            if (module.offset == null) {
                //This should not happen. bug 59858
                throw new IOException("Module offset for '" + name + "' was never read.");
            }
            //try the general case, where module.offset is accurate
            try (InputStream compressed = new DocumentInputStream(documentNode)) {
                // we know the offset already, so decompress immediately on-the-fly
                trySkip(compressed, module.offset);
                try (InputStream decompressed = new RLEDecompressingInputStream(compressed)) {
                    module.read(decompressed);
                }
                return;
            } catch (IllegalArgumentException | IllegalStateException ignored) {
                // bad offset — fall through to the brute-force scan below
            }
            //bad module.offset, try brute force
            byte[] decompressedBytes;
            try (InputStream compressed = new DocumentInputStream(documentNode)) {
                decompressedBytes = findCompressedStreamWBruteForce(compressed);
            }
            if (decompressedBytes != null) {
                module.read(UnsynchronizedByteArrayInputStream.builder().setByteArray(decompressedBytes).get());
            }
        }
    }
/**
* Skips {@code n} bytes in an input stream, throwing IOException if the
* number of bytes skipped is different than requested.
* @throws IOException If skipping would exceed the available data or skipping did not work.
*/
private static void trySkip(InputStream in, long n) throws IOException {
long skippedBytes = IOUtils.skipFully(in, n);
if (skippedBytes != n) {
if (skippedBytes < 0) {
throw new IOException(
"Tried skipping " + n + " bytes, but no bytes were skipped. "
+ "The end of the stream has been reached or the stream is closed.");
} else {
throw new IOException(
"Tried skipping " + n + " bytes, but only " + skippedBytes + " bytes were skipped. "
+ "This should never happen with a non-corrupt file.");
}
}
}
    // Constants from MS-OVBA: https://msdn.microsoft.com/en-us/library/office/cc313094(v=office.12).aspx
    // Each value is the 2-byte "Reserved" marker that separates the MBCS
    // spelling of a string record from its Unicode spelling in the dir stream.
    private static final int STREAMNAME_RESERVED = 0x0032;
    private static final int PROJECT_CONSTANTS_RESERVED = 0x003C;
    private static final int HELP_FILE_PATH_RESERVED = 0x003D;
    private static final int REFERENCE_NAME_RESERVED = 0x003E;
    private static final int DOC_STRING_RESERVED = 0x0040;
    private static final int MODULE_DOCSTRING_RESERVED = 0x0048;
/**
* Reads VBA Project modules from a VBA Project directory located at
* {@code macroDir} into {@code modules}.
*
* @since 3.15-beta2
*/
protected void readMacros(DirectoryNode macroDir, ModuleMap modules) throws IOException {
//bug59858 shows that dirstream may not be in this directory (\MBD00082648\_VBA_PROJECT_CUR\VBA ENTRY NAME)
//but may be in another directory (\_VBA_PROJECT_CUR\VBA ENTRY NAME)
//process the dirstream first -- "dir" is case insensitive
for (String entryName : macroDir.getEntryNames()) {
if ("dir".equalsIgnoreCase(entryName)) {
processDirStream(macroDir.getEntryCaseInsensitive(entryName), modules);
break;
}
}
for (Entry entry : macroDir) {
if (! (entry instanceof DocumentNode)) { continue; }
String name = entry.getName();
DocumentNode document = (DocumentNode)entry;
if (! "dir".equalsIgnoreCase(name) && !startsWithIgnoreCase(name, "__SRP")
&& !startsWithIgnoreCase(name, "_VBA_PROJECT")) {
// process module, skip __SRP and _VBA_PROJECT since these do not contain macros
readModuleFromDocumentStream(document, name, modules);
}
}
}
protected void findProjectProperties(DirectoryNode node, Map<String, String> moduleNameMap, ModuleMap modules) throws IOException {
for (Entry entry : node) {
if ("project".equalsIgnoreCase(entry.getName())) {
DocumentNode document = (DocumentNode)entry;
try(DocumentInputStream dis = new DocumentInputStream(document)) {
readProjectProperties(dis, moduleNameMap, modules);
return;
}
} else if (entry instanceof DirectoryNode) {
findProjectProperties((DirectoryNode)entry, moduleNameMap, modules);
}
}
}
protected void findModuleNameMap(DirectoryNode node, Map<String, String> moduleNameMap, ModuleMap modules) throws IOException {
for (Entry entry : node) {
if ("projectwm".equalsIgnoreCase(entry.getName())) {
DocumentNode document = (DocumentNode)entry;
try(DocumentInputStream dis = new DocumentInputStream(document)) {
readNameMapRecords(dis, moduleNameMap, modules.charset);
return;
}
} else if (entry.isDirectoryEntry()) {
findModuleNameMap((DirectoryNode)entry, moduleNameMap, modules);
}
}
}
    /**
     * Record ids used in the VBA "dir" stream (MS-OVBA 2.3.4.2). Ids with a
     * negative value are synthetic: EOF marks end of stream, UNKNOWN any id
     * this reader does not recognize.
     */
    private enum RecordType {
        // Constants from MS-OVBA: https://msdn.microsoft.com/en-us/library/office/cc313094(v=office.12).aspx
        MODULE_OFFSET(0x0031),
        PROJECT_SYS_KIND(0x01),
        PROJECT_LCID(0x0002),
        PROJECT_LCID_INVOKE(0x14),
        PROJECT_CODEPAGE(0x0003),
        PROJECT_NAME(0x04),
        PROJECT_DOC_STRING(0x05),
        PROJECT_HELP_FILE_PATH(0x06),
        PROJECT_HELP_CONTEXT(0x07, 8),
        PROJECT_LIB_FLAGS(0x08),
        PROJECT_VERSION(0x09, 10),
        PROJECT_CONSTANTS(0x0C),
        PROJECT_MODULES(0x0F),
        DIR_STREAM_TERMINATOR(0x10),
        PROJECT_COOKIE(0x13),
        MODULE_NAME(0x19),
        MODULE_NAME_UNICODE(0x47),
        MODULE_STREAM_NAME(0x1A),
        MODULE_DOC_STRING(0x1C),
        MODULE_HELP_CONTEXT(0x1E),
        MODULE_COOKIE(0x2c),
        MODULE_TYPE_PROCEDURAL(0x21, 4),
        MODULE_TYPE_OTHER(0x22, 4),
        MODULE_PRIVATE(0x28, 4),
        REFERENCE_NAME(0x16),
        REFERENCE_REGISTERED(0x0D),
        REFERENCE_PROJECT(0x0E),
        REFERENCE_CONTROL_A(0x2F),
        //according to the spec, REFERENCE_CONTROL_B(0x33) should have the
        //same structure as REFERENCE_CONTROL_A(0x2F).
        //However, it seems to have the int(length) record structure that most others do.
        //See 59830.xls for this record.
        REFERENCE_CONTROL_B(0x33),
        //REFERENCE_ORIGINAL(0x33),
        MODULE_TERMINATOR(0x002B),
        EOF(-1),
        UNKNOWN(-2);
        // id as it appears on the wire in the dir stream
        private final int id;
        // payload length when fixed by the spec; -1 means the length is stored
        // as an int immediately after the record id
        private final int constantLength;
        RecordType(int id) {
            this.id = id;
            this.constantLength = -1;
        }
        RecordType(int id, int constantLength) {
            this.id = id;
            this.constantLength = constantLength;
        }
        int getConstantLength() {
            return constantLength;
        }
        // Linear scan is fine here: the enum is small and this is not hot.
        static RecordType lookup(int id) {
            for (RecordType type : RecordType.values()) {
                if (type.id == id) {
                    return type;
                }
            }
            return UNKNOWN;
        }
    }
    // Parser state while walking the dir stream; the spec orders the records
    // as information, then references, then modules (MS-OVBA 2.3.4.2).
    private enum DIR_STATE {
        INFORMATION_RECORD,
        REFERENCES_RECORD,
        MODULES_RECORD
    }
private static class ASCIIUnicodeStringPair {
private final String ascii;
private final String unicode;
private final int pushbackRecordId;
ASCIIUnicodeStringPair(String ascii, int pushbackRecordId) {
this.ascii = ascii;
this.unicode = "";
this.pushbackRecordId = pushbackRecordId;
}
ASCIIUnicodeStringPair(String ascii, String unicode) {
this.ascii = ascii;
this.unicode = unicode;
pushbackRecordId = -1;
}
private String getAscii() {
return ascii;
}
private String getUnicode() {
return unicode;
}
private int getPushbackRecordId() {
return pushbackRecordId;
}
}
    /**
     * Walks every record of the RLE-compressed "dir" stream, picking up the
     * project codepage, module stream names and module offsets, and skipping
     * everything else. Record layout per MS-OVBA 2.3.4.2; unknown records are
     * skipped via their length prefix.
     *
     * @param dir the "dir" document entry (must be a DocumentNode)
     * @param modules receives module offsets/names; its charset is updated
     *        from the PROJECT_CODEPAGE record
     * @throws IOException if a record is malformed or reading fails; the
     *         failing record id is included in the message
     */
    private void processDirStream(Entry dir, ModuleMap modules) throws IOException {
        DocumentNode dirDocumentNode = (DocumentNode)dir;
        DIR_STATE dirState = DIR_STATE.INFORMATION_RECORD;
        try (DocumentInputStream dis = new DocumentInputStream(dirDocumentNode)) {
            String streamName = null;
            int recordId = 0;
            try (RLEDecompressingInputStream in = new RLEDecompressingInputStream(dis)) {
                while (true) {
                    recordId = in.readShort();
                    if (recordId == -1) {
                        break;
                    }
                    RecordType type = RecordType.lookup(recordId);
                    if (type.equals(RecordType.EOF) || type.equals(RecordType.DIR_STREAM_TERMINATOR)) {
                        break;
                    }
                    switch (type) {
                        case PROJECT_VERSION:
                            trySkip(in, RecordType.PROJECT_VERSION.getConstantLength());
                            break;
                        case PROJECT_CODEPAGE:
                            in.readInt();//record size must == 4
                            int codepage = in.readShort();
                            // All module bytes are decoded with this charset from here on.
                            modules.charset = Charset.forName(CodePageUtil.codepageToEncoding(codepage, true));
                            break;
                        case MODULE_STREAM_NAME:
                            // Remember the stream name; the following MODULE_OFFSET record refers to it.
                            ASCIIUnicodeStringPair pair = readStringPair(in, modules.charset, STREAMNAME_RESERVED);
                            streamName = pair.getAscii();
                            break;
                        case PROJECT_DOC_STRING:
                            readStringPair(in, modules.charset, DOC_STRING_RESERVED);
                            break;
                        case PROJECT_HELP_FILE_PATH:
                            readStringPair(in, modules.charset, HELP_FILE_PATH_RESERVED);
                            break;
                        case PROJECT_CONSTANTS:
                            readStringPair(in, modules.charset, PROJECT_CONSTANTS_RESERVED);
                            break;
                        case REFERENCE_NAME:
                            if (dirState.equals(DIR_STATE.INFORMATION_RECORD)) {
                                dirState = DIR_STATE.REFERENCES_RECORD;
                            }
                            ASCIIUnicodeStringPair stringPair = readStringPair(in,
                                    modules.charset, REFERENCE_NAME_RESERVED, false);
                            if (stringPair.getPushbackRecordId() == -1) {
                                break;
                            }
                            //Special handling for when there's only an ascii string and a REFERENCED_REGISTERED
                            //record that follows.
                            //See https://github.com/decalage2/oletools/blob/master/oletools/olevba.py#L1516
                            //and https://github.com/decalage2/oletools/pull/135 from (@c1fe)
                            if (stringPair.getPushbackRecordId() != RecordType.REFERENCE_REGISTERED.id) {
                                throw new IllegalArgumentException("Unexpected reserved character. "+
                                        "Expected "+Integer.toHexString(REFERENCE_NAME_RESERVED)
                                        + " or "+Integer.toHexString(RecordType.REFERENCE_REGISTERED.id)+
                                        " not: "+Integer.toHexString(stringPair.getPushbackRecordId()));
                            }
                            //fall through! (intentional — see bug 62625)
                        case REFERENCE_REGISTERED:
                            //REFERENCE_REGISTERED must come immediately after
                            //REFERENCE_NAME to allow for fall through in special case of bug 62625
                            int recLength = in.readInt();
                            trySkip(in, recLength);
                            break;
                        case MODULE_DOC_STRING:
                            int modDocStringLength = in.readInt();
                            readString(in, modDocStringLength, modules.charset);
                            int modDocStringReserved = in.readShort();
                            if (modDocStringReserved != MODULE_DOCSTRING_RESERVED) {
                                throw new IOException("Expected x003C after stream name before Unicode stream name, but found: " +
                                        Integer.toHexString(modDocStringReserved));
                            }
                            int unicodeModDocStringLength = in.readInt();
                            readUnicodeString(in, unicodeModDocStringLength);
                            // doc string is read but currently discarded; keep for future use
                            break;
                        case MODULE_OFFSET:
                            int modOffsetSz = in.readInt();
                            //record size; should be 4
                            readModuleMetadataFromDirStream(in, streamName, modules);
                            break;
                        case PROJECT_MODULES:
                            dirState = DIR_STATE.MODULES_RECORD;
                            in.readInt();//size must == 2
                            in.readShort();//number of modules
                            break;
                        case REFERENCE_CONTROL_A:
                            int szTwiddled = in.readInt();
                            trySkip(in, szTwiddled);
                            int nextRecord = in.readShort();
                            //reference name is optional!
                            if (nextRecord == RecordType.REFERENCE_NAME.id) {
                                readStringPair(in, modules.charset, REFERENCE_NAME_RESERVED);
                                nextRecord = in.readShort();
                            }
                            if (nextRecord != 0x30) {
                                throw new IOException("Expected 0x30 as Reserved3 in a ReferenceControl record");
                            }
                            int szExtended = in.readInt();
                            trySkip(in, szExtended);
                            break;
                        case MODULE_TERMINATOR:
                            int endOfModulesReserved = in.readInt();
                            //reserved field; must be 0
                            break;
                        default:
                            // Records with a spec-fixed length carry no length prefix;
                            // all others store their payload length as an int.
                            if (type.getConstantLength() > -1) {
                                trySkip(in, type.getConstantLength());
                            } else {
                                int recordLength = in.readInt();
                                trySkip(in, recordLength);
                            }
                            break;
                    }
                }
            } catch (final IOException e) {
                throw new IOException(
                        "Error occurred while reading macros at section id "
                        + recordId + " (" + HexDump.shortToHex(recordId) + ")", e);
            }
        }
    }
    /**
     * Convenience overload that throws on an unexpected reserved marker
     * between the ascii and Unicode spellings.
     *
     * @param in the dir stream, positioned at the string's length field
     * @param charset charset for decoding the ascii/MBCS spelling
     * @param reservedByte the reserved marker expected between the spellings
     * @throws IOException if reading fails or the marker is unexpected
     */
    private ASCIIUnicodeStringPair readStringPair(RLEDecompressingInputStream in,
                                                  Charset charset, int reservedByte) throws IOException {
        return readStringPair(in, charset, reservedByte, true);
    }
private ASCIIUnicodeStringPair readStringPair(RLEDecompressingInputStream in,
Charset charset, int reservedByte,
boolean throwOnUnexpectedReservedByte) throws IOException {
int nameLength = in.readInt();
String ascii = readString(in, nameLength, charset);
int reserved = in.readShort();
if (reserved != reservedByte) {
if (throwOnUnexpectedReservedByte) {
throw new IOException("Expected " + Integer.toHexString(reservedByte) +
"after name before Unicode name, but found: " +
Integer.toHexString(reserved));
} else {
return new ASCIIUnicodeStringPair(ascii, reserved);
}
}
int unicodeNameRecordLength = in.readInt();
String unicode = readUnicodeString(in, unicodeNameRecordLength);
return new ASCIIUnicodeStringPair(ascii, unicode);
}
protected void readNameMapRecords(InputStream is,
Map<String, String> moduleNames, Charset charset) throws IOException {
//see 2.3.3 PROJECTwm Stream: Module Name Information
//multibytecharstring
String mbcs;
String unicode;
//arbitrary sanity threshold
final int maxNameRecords = 10000;
int records = 0;
while (++records < maxNameRecords) {
try {
int b = IOUtils.readByte(is);
//check for two 0x00 that mark end of record
if (b == 0) {
b = IOUtils.readByte(is);
if (b == 0) {
return;
}
}
mbcs = readMBCS(b, is, charset);
} catch (EOFException e) {
return;
}
try {
unicode = readUnicode(is);
} catch (EOFException e) {
return;
}
if (StringUtil.isNotBlank(mbcs) && StringUtil.isNotBlank(unicode)) {
moduleNames.put(mbcs, unicode);
}
}
LOGGER.atWarn().log("Hit max name records to read (" + maxNameRecords + "). Stopped early.");
}
private static String readUnicode(InputStream is) throws IOException {
//reads null-terminated unicode string
try (UnsynchronizedByteArrayOutputStream bos = UnsynchronizedByteArrayOutputStream.builder().get()) {
int b0 = IOUtils.readByte(is);
int b1 = IOUtils.readByte(is);
int read = 2;
while ((b0 + b1) != 0 && read < MAX_STRING_LENGTH) {
bos.write(b0);
bos.write(b1);
b0 = IOUtils.readByte(is);
b1 = IOUtils.readByte(is);
read += 2;
}
if (read >= MAX_STRING_LENGTH) {
LOGGER.atWarn().log("stopped reading unicode name after {} bytes", box(read));
}
return bos.toString(StandardCharsets.UTF_16LE);
}
}
private static String readMBCS(int firstByte, InputStream is, Charset charset) throws IOException {
try (UnsynchronizedByteArrayOutputStream bos = UnsynchronizedByteArrayOutputStream.builder().get()) {
int len = 0;
int b = firstByte;
while (b > 0 && len < MAX_STRING_LENGTH) {
++len;
bos.write(b);
b = IOUtils.readByte(is);
}
return bos.toString(charset);
}
}
/**
* Read {@code length} bytes of MBCS (multi-byte character set) characters from the stream
*
* @param stream the inputstream to read from
* @param length number of bytes to read from stream
* @param charset the character set encoding of the bytes in the stream
* @return a java String in the supplied character set
* @throws IOException If reading from the stream fails
*/
private static String readString(InputStream stream, int length, Charset charset) throws IOException {
byte[] buffer = IOUtils.safelyAllocate(length, MAX_STRING_LENGTH);
int bytesRead = IOUtils.readFully(stream, buffer);
if (bytesRead != length) {
throw new IOException("Tried to read: "+length +
", but could only read: "+bytesRead);
}
return new String(buffer, 0, length, charset);
}
protected void readProjectProperties(DocumentInputStream dis,
Map<String, String> moduleNameMap, ModuleMap modules) throws IOException {
InputStreamReader reader = new InputStreamReader(dis, modules.charset);
StringBuilder builder = new StringBuilder();
char[] buffer = new char[512];
int read;
while ((read = reader.read(buffer)) >= 0) {
builder.append(buffer, 0, read);
}
String properties = builder.toString();
//the module name map names should be in exactly the same order
//as the module names here. See 2.3.3 PROJECTwm Stream.
//At some point, we might want to enforce that.
for (String line : properties.split("\r\n|\n\r")) {
if (!line.startsWith("[")) {
String[] tokens = line.split("=");
if (tokens.length > 1 && tokens[1].length() > 1
&& tokens[1].startsWith("\"") && tokens[1].endsWith("\"")) {
// Remove any double quotes
tokens[1] = tokens[1].substring(1, tokens[1].length() - 1);
}
if ("Document".equals(tokens[0]) && tokens.length > 1) {
String mn = tokens[1].substring(0, tokens[1].indexOf("/&H"));
ModuleImpl module = getModule(mn, moduleNameMap, modules);
if (module != null) {
module.moduleType = ModuleType.Document;
} else {
LOGGER.atWarn().log("couldn't find module with name: {}", mn);
}
} else if ("Module".equals(tokens[0]) && tokens.length > 1) {
ModuleImpl module = getModule(tokens[1], moduleNameMap, modules);
if (module != null) {
module.moduleType = ModuleType.Module;
} else {
LOGGER.atWarn().log("couldn't find module with name: {}", tokens[1]);
}
} else if ("Class".equals(tokens[0]) && tokens.length > 1) {
ModuleImpl module = getModule(tokens[1], moduleNameMap, modules);
if (module != null) {
module.moduleType = ModuleType.Class;
} else {
LOGGER.atWarn().log("couldn't find module with name: {}", tokens[1]);
}
}
}
}
}
//can return null!
private ModuleImpl getModule(String moduleName, Map<String, String> moduleNameMap, ModuleMap moduleMap) {
if (moduleNameMap.containsKey(moduleName)) {
return moduleMap.get(moduleNameMap.get(moduleName));
}
return moduleMap.get(moduleName);
}
private String readUnicodeString(RLEDecompressingInputStream in, int unicodeNameRecordLength) throws IOException {
byte[] buffer = IOUtils.safelyAllocate(unicodeNameRecordLength, MAX_STRING_LENGTH);
int bytesRead = IOUtils.readFully(in, buffer);
if (bytesRead != unicodeNameRecordLength) {
throw new EOFException();
}
return new String(buffer, StringUtil.UTF16LE);
}
    /**
     * Sometimes the offset record in the dirstream is incorrect, but the macro can still be found.
     * This will try to find the first RLEDecompressing stream that starts with "Attribute".
     * This relies on some, er, heuristics, admittedly.
     *
     * @param is full module inputstream to read
     * @return uncompressed bytes if found, {@code null} otherwise
     * @throws IOException for a true IOException copying the is to a byte array
     */
    private static byte[] findCompressedStreamWBruteForce(InputStream is) throws IOException {
        //buffer to memory for multiple tries
        byte[] compressed = IOUtils.toByteArray(is);
        byte[] decompressed = null;
        for (int i = 0; i < compressed.length; i++) {
            // 0x01 is the signature byte of a compressed container — candidate start
            if (compressed[i] == 0x01 && i < compressed.length-1) {
                int w = LittleEndian.getUShort(compressed, i+1);
                // presumably a chunk-header plausibility check (flag bits 0x3000);
                // NOTE(review): confirm against MS-OVBA 2.4.1 CompressedChunkHeader
                if (w <= 0 || (w & 0x7000) != 0x3000) {
                    continue;
                }
                decompressed = tryToDecompress(UnsynchronizedByteArrayInputStream.builder().
                        setByteArray(compressed).
                        setOffset(i).
                        setLength(compressed.length - i).
                        get());
                if (decompressed != null) {
                    if (decompressed.length > 9) {
                        //this is a complete hack. The challenge is that there
                        //can be many 0 length or junk streams that are uncompressed
                        //look in the first 20 characters for "Attribute"
                        int firstX = Math.min(20, decompressed.length);
                        String start = new String(decompressed, 0, firstX, StringUtil.WIN_1252);
                        if (start.contains("Attribute")) {
                            return decompressed;
                        }
                    }
                }
            }
        }
        // may be null, or the last candidate that decompressed but failed the "Attribute" check
        return decompressed;
    }
private static byte[] tryToDecompress(InputStream is) {
try (RLEDecompressingInputStream ris = new RLEDecompressingInputStream(is)) {
return IOUtils.toByteArray(ris);
} catch (IllegalArgumentException | IOException | IllegalStateException e){
return null;
}
}
}
|
googleapis/google-cloud-java | 35,940 | java-retail/proto-google-cloud-retail-v2alpha/src/main/java/com/google/cloud/retail/v2alpha/ListGenerativeQuestionConfigsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2alpha/generative_question_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2alpha;
/**
*
*
* <pre>
* Response for ListQuestions method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse}
*/
public final class ListGenerativeQuestionConfigsResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse)
ListGenerativeQuestionConfigsResponseOrBuilder {
  // NOTE(review): protoc-generated code (see file header) — do not hand-edit;
  // constructors and descriptor plumbing are standard GeneratedMessageV3 boilerplate.
  private static final long serialVersionUID = 0L;
  // Use ListGenerativeQuestionConfigsResponse.newBuilder() to construct.
  private ListGenerativeQuestionConfigsResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListGenerativeQuestionConfigsResponse() {
    generativeQuestionConfigs_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListGenerativeQuestionConfigsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2alpha.GenerativeQuestionServiceProto
        .internal_static_google_cloud_retail_v2alpha_ListGenerativeQuestionConfigsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2alpha.GenerativeQuestionServiceProto
        .internal_static_google_cloud_retail_v2alpha_ListGenerativeQuestionConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse.class,
            com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse.Builder.class);
  }
  // NOTE(review): protoc-generated accessors for the repeated
  // generative_question_configs field — do not hand-edit.
  public static final int GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.retail.v2alpha.GenerativeQuestionConfig>
      generativeQuestionConfigs_;
  /**
   *
   *
   * <pre>
   * All the questions for a given catalog.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.retail.v2alpha.GenerativeQuestionConfig>
      getGenerativeQuestionConfigsList() {
    return generativeQuestionConfigs_;
  }
  /**
   *
   *
   * <pre>
   * All the questions for a given catalog.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.retail.v2alpha.GenerativeQuestionConfigOrBuilder>
      getGenerativeQuestionConfigsOrBuilderList() {
    return generativeQuestionConfigs_;
  }
  /**
   *
   *
   * <pre>
   * All the questions for a given catalog.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
   * </code>
   */
  @java.lang.Override
  public int getGenerativeQuestionConfigsCount() {
    return generativeQuestionConfigs_.size();
  }
  /**
   *
   *
   * <pre>
   * All the questions for a given catalog.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2alpha.GenerativeQuestionConfig getGenerativeQuestionConfigs(
      int index) {
    return generativeQuestionConfigs_.get(index);
  }
  /**
   *
   *
   * <pre>
   * All the questions for a given catalog.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2alpha.GenerativeQuestionConfigOrBuilder
      getGenerativeQuestionConfigsOrBuilder(int index) {
    return generativeQuestionConfigs_.get(index);
  }
  // NOTE(review): protoc-generated serialization/equality boilerplate — do not hand-edit.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
      output.writeMessage(1, generativeQuestionConfigs_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, generativeQuestionConfigs_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse other =
        (com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse) obj;
    if (!getGenerativeQuestionConfigsList().equals(other.getGenerativeQuestionConfigsList()))
      return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getGenerativeQuestionConfigsCount() > 0) {
      hash = (37 * hash) + GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getGenerativeQuestionConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // NOTE(review): protoc-generated parseFrom overloads (one per input type) — do not hand-edit.
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // NOTE(review): protoc-generated builder factory methods — do not hand-edit.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response for ListQuestions method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse)
com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponseOrBuilder {
    // NOTE(review): protoc-generated Builder plumbing — do not hand-edit.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2alpha.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListGenerativeQuestionConfigsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2alpha.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListGenerativeQuestionConfigsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse.class,
              com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse.Builder.class);
    }
    // Construct using
    // com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (generativeQuestionConfigsBuilder_ == null) {
        generativeQuestionConfigs_ = java.util.Collections.emptyList();
      } else {
        generativeQuestionConfigs_ = null;
        generativeQuestionConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2alpha.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2alpha_ListGenerativeQuestionConfigsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
        getDefaultInstanceForType() {
      return com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse build() {
      com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse buildPartial() {
      com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse result =
          new com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
private void buildPartialRepeatedFields(
com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse result) {
if (generativeQuestionConfigsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
generativeQuestionConfigs_ =
java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.generativeQuestionConfigs_ = generativeQuestionConfigs_;
} else {
result.generativeQuestionConfigs_ = generativeQuestionConfigsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse) {
return mergeFrom(
(com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse other) {
if (other
== com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
.getDefaultInstance()) return this;
if (generativeQuestionConfigsBuilder_ == null) {
if (!other.generativeQuestionConfigs_.isEmpty()) {
if (generativeQuestionConfigs_.isEmpty()) {
generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.addAll(other.generativeQuestionConfigs_);
}
onChanged();
}
} else {
if (!other.generativeQuestionConfigs_.isEmpty()) {
if (generativeQuestionConfigsBuilder_.isEmpty()) {
generativeQuestionConfigsBuilder_.dispose();
generativeQuestionConfigsBuilder_ = null;
generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
generativeQuestionConfigsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getGenerativeQuestionConfigsFieldBuilder()
: null;
} else {
generativeQuestionConfigsBuilder_.addAllMessages(other.generativeQuestionConfigs_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig m =
input.readMessage(
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.parser(),
extensionRegistry);
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(m);
} else {
generativeQuestionConfigsBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.retail.v2alpha.GenerativeQuestionConfig>
generativeQuestionConfigs_ = java.util.Collections.emptyList();
private void ensureGenerativeQuestionConfigsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
generativeQuestionConfigs_ =
new java.util.ArrayList<com.google.cloud.retail.v2alpha.GenerativeQuestionConfig>(
generativeQuestionConfigs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfigOrBuilder>
generativeQuestionConfigsBuilder_;
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public java.util.List<com.google.cloud.retail.v2alpha.GenerativeQuestionConfig>
getGenerativeQuestionConfigsList() {
if (generativeQuestionConfigsBuilder_ == null) {
return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
} else {
return generativeQuestionConfigsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public int getGenerativeQuestionConfigsCount() {
if (generativeQuestionConfigsBuilder_ == null) {
return generativeQuestionConfigs_.size();
} else {
return generativeQuestionConfigsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2alpha.GenerativeQuestionConfig getGenerativeQuestionConfigs(
int index) {
if (generativeQuestionConfigsBuilder_ == null) {
return generativeQuestionConfigs_.get(index);
} else {
return generativeQuestionConfigsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder setGenerativeQuestionConfigs(
int index, com.google.cloud.retail.v2alpha.GenerativeQuestionConfig value) {
if (generativeQuestionConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.set(index, value);
onChanged();
} else {
generativeQuestionConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder setGenerativeQuestionConfigs(
int index,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder builderForValue) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.set(index, builderForValue.build());
onChanged();
} else {
generativeQuestionConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig value) {
if (generativeQuestionConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(value);
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
int index, com.google.cloud.retail.v2alpha.GenerativeQuestionConfig value) {
if (generativeQuestionConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(index, value);
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder builderForValue) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(builderForValue.build());
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
int index,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder builderForValue) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(index, builderForValue.build());
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addAllGenerativeQuestionConfigs(
java.lang.Iterable<? extends com.google.cloud.retail.v2alpha.GenerativeQuestionConfig>
values) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, generativeQuestionConfigs_);
onChanged();
} else {
generativeQuestionConfigsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder clearGenerativeQuestionConfigs() {
if (generativeQuestionConfigsBuilder_ == null) {
generativeQuestionConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
generativeQuestionConfigsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder removeGenerativeQuestionConfigs(int index) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.remove(index);
onChanged();
} else {
generativeQuestionConfigsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder
getGenerativeQuestionConfigsBuilder(int index) {
return getGenerativeQuestionConfigsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2alpha.GenerativeQuestionConfigOrBuilder
getGenerativeQuestionConfigsOrBuilder(int index) {
if (generativeQuestionConfigsBuilder_ == null) {
return generativeQuestionConfigs_.get(index);
} else {
return generativeQuestionConfigsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public java.util.List<
? extends com.google.cloud.retail.v2alpha.GenerativeQuestionConfigOrBuilder>
getGenerativeQuestionConfigsOrBuilderList() {
if (generativeQuestionConfigsBuilder_ != null) {
return generativeQuestionConfigsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder
addGenerativeQuestionConfigsBuilder() {
return getGenerativeQuestionConfigsFieldBuilder()
.addBuilder(
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder
addGenerativeQuestionConfigsBuilder(int index) {
return getGenerativeQuestionConfigsFieldBuilder()
.addBuilder(
index, com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2alpha.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public java.util.List<com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder>
getGenerativeQuestionConfigsBuilderList() {
return getGenerativeQuestionConfigsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfigOrBuilder>
getGenerativeQuestionConfigsFieldBuilder() {
if (generativeQuestionConfigsBuilder_ == null) {
generativeQuestionConfigsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfig.Builder,
com.google.cloud.retail.v2alpha.GenerativeQuestionConfigOrBuilder>(
generativeQuestionConfigs_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
generativeQuestionConfigs_ = null;
}
return generativeQuestionConfigsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse)
// Shared immutable default instance, created once at class-load time.
private static final com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse();
}
// Accessor for the shared default (empty) instance.
public static com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser instance shared by all parse entry points; every failure path
// attaches the partially-built message so callers can inspect what was read.
private static final com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListGenerativeQuestionConfigsResponse>() {
      @java.lang.Override
      public ListGenerativeQuestionConfigsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser.
public static com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.retail.v2alpha.ListGenerativeQuestionConfigsResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/poi | 36,189 | poi/src/main/java/org/apache/poi/ss/formula/FormulaShifter.java | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.ss.formula;
import org.apache.poi.ss.SpreadsheetVersion;
import org.apache.poi.ss.formula.ptg.Area2DPtgBase;
import org.apache.poi.ss.formula.ptg.Area3DPtg;
import org.apache.poi.ss.formula.ptg.Area3DPxg;
import org.apache.poi.ss.formula.ptg.AreaErrPtg;
import org.apache.poi.ss.formula.ptg.AreaPtg;
import org.apache.poi.ss.formula.ptg.AreaPtgBase;
import org.apache.poi.ss.formula.ptg.Deleted3DPxg;
import org.apache.poi.ss.formula.ptg.DeletedArea3DPtg;
import org.apache.poi.ss.formula.ptg.DeletedRef3DPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.formula.ptg.Ref3DPtg;
import org.apache.poi.ss.formula.ptg.Ref3DPxg;
import org.apache.poi.ss.formula.ptg.RefErrorPtg;
import org.apache.poi.ss.formula.ptg.RefPtg;
import org.apache.poi.ss.formula.ptg.RefPtgBase;
/**
* Updates Formulas as rows or sheets are shifted
*/
public final class FormulaShifter {
/**
 * Selects which structural operation this shifter applies to formula tokens.
 * (Do not reorder constants without checking all switch sites.)
 */
private enum ShiftMode {
    RowMove,
    RowCopy,
    /** @since POI 4.0.0 */
    ColumnMove,
    /** @since POI 4.0.0 */
    ColumnCopy,
    SheetMove,
}
/**
 * Extern sheet index of sheet where moving is occurring,
 * used for updating HSSF style 3D references
 */
private final int _externSheetIndex;
/**
 * Sheet name of the sheet where moving is occurring,
 * used for updating XSSF style 3D references on row shifts.
 */
private final String _sheetName;
// First and last index (inclusive) of the moved row/column span; -1 for SheetMove.
private final int _firstMovedIndex;
private final int _lastMovedIndex;
// Signed distance of the move; negative values shift toward index 0. -1 for SheetMove.
private final int _amountToMove;
// Source/destination sheet indices; only meaningful for SheetMove (-1 otherwise).
private final int _srcSheetIndex;
private final int _dstSheetIndex;
// Spreadsheet limits for the target format; null for SheetMove.
private final SpreadsheetVersion _version;
// Which shift operation this instance performs.
private final ShiftMode _mode;
/**
 * Create an instance for shifting rows or columns.
 *
 * For example, this will be called on {@link org.apache.poi.hssf.usermodel.HSSFSheet#shiftRows(int, int, int)} }
 *
 * @param externSheetIndex extern sheet index of the sheet being shifted (HSSF-style 3D refs)
 * @param sheetName name of the sheet being shifted (XSSF-style 3D refs)
 * @param firstMovedIndex first row/column of the moved span (inclusive)
 * @param lastMovedIndex last row/column of the moved span (inclusive)
 * @param amountToMove signed shift distance
 * @param mode row/column move or copy
 * @param version spreadsheet limits of the target format
 * @throws IllegalArgumentException if {@code firstMovedIndex > lastMovedIndex}
 */
private FormulaShifter(int externSheetIndex, String sheetName, int firstMovedIndex, int lastMovedIndex, int amountToMove,
        ShiftMode mode, SpreadsheetVersion version) {
    if (firstMovedIndex > lastMovedIndex) {
        throw new IllegalArgumentException("firstMovedIndex, lastMovedIndex out of order");
    }
    _externSheetIndex = externSheetIndex;
    _sheetName = sheetName;
    _firstMovedIndex = firstMovedIndex;
    _lastMovedIndex = lastMovedIndex;
    _amountToMove = amountToMove;
    _mode = mode;
    _version = version;
    // Sheet indices are unused in row/column modes.
    _srcSheetIndex = _dstSheetIndex = -1;
}
/**
 * Create an instance for shifting sheets.
 *
 * For example, this will be called on {@link org.apache.poi.hssf.usermodel.HSSFWorkbook#setSheetOrder(String, int)}
 *
 * @param srcSheetIndex index the sheet is moved from
 * @param dstSheetIndex index the sheet is moved to
 */
private FormulaShifter(int srcSheetIndex, int dstSheetIndex) {
    // Row/column-specific state is unused in SheetMove mode.
    _externSheetIndex = _firstMovedIndex = _lastMovedIndex = _amountToMove = -1;
    _sheetName = null;
    _version = null;
    _srcSheetIndex = srcSheetIndex;
    _dstSheetIndex = dstSheetIndex;
    _mode = ShiftMode.SheetMove;
}
/**
 * Creates a shifter that updates formulas for a row move.
 */
public static FormulaShifter createForRowShift(int externSheetIndex, String sheetName, int firstMovedRowIndex, int lastMovedRowIndex, int numberOfRowsToMove,
        SpreadsheetVersion version) {
    return new FormulaShifter(externSheetIndex, sheetName, firstMovedRowIndex, lastMovedRowIndex, numberOfRowsToMove, ShiftMode.RowMove, version);
}
/**
 * Creates a shifter that updates formulas for a row copy.
 */
public static FormulaShifter createForRowCopy(int externSheetIndex, String sheetName, int firstMovedRowIndex, int lastMovedRowIndex, int numberOfRowsToMove,
        SpreadsheetVersion version) {
    return new FormulaShifter(externSheetIndex, sheetName, firstMovedRowIndex, lastMovedRowIndex, numberOfRowsToMove, ShiftMode.RowCopy, version);
}
/**
 * Creates a shifter that updates formulas for a column move.
 *
 * @since POI 4.0.0
 */
public static FormulaShifter createForColumnShift(int externSheetIndex, String sheetName, int firstMovedColumnIndex, int lastMovedColumnIndex, int numberOfColumnsToMove,
        SpreadsheetVersion version) {
    return new FormulaShifter(externSheetIndex, sheetName, firstMovedColumnIndex, lastMovedColumnIndex, numberOfColumnsToMove, ShiftMode.ColumnMove, version);
}
/**
 * Creates a shifter that updates formulas for a column copy.
 *
 * @since POI 4.0.0
 */
public static FormulaShifter createForColumnCopy(int externSheetIndex, String sheetName, int firstMovedColumnIndex, int lastMovedColumnIndex, int numberOfColumnsToMove,
        SpreadsheetVersion version) {
    return new FormulaShifter(externSheetIndex, sheetName, firstMovedColumnIndex, lastMovedColumnIndex, numberOfColumnsToMove, ShiftMode.ColumnCopy, version);
}
/**
 * Creates a shifter that updates 3D references when a sheet is reordered.
 */
public static FormulaShifter createForSheetShift(int srcSheetIndex, int dstSheetIndex) {
    return new FormulaShifter(srcSheetIndex, dstSheetIndex);
}
/**
 * Debug representation including the moved span and shift distance.
 *
 * <p>Fix: the previous implementation concatenated the three ints with no
 * separators (e.g. {@code [125-1]} for first=1, last=2, amount=5... ambiguous for
 * multi-digit or negative values). Separators are added; no caller parses this string.
 */
@Override
public String toString() {
    return getClass().getName() +
            " [" +
            _firstMovedIndex +
            ", " +
            _lastMovedIndex +
            ", " +
            _amountToMove +
            "]";
}
/**
 * Rewrites the given token array in place, adjusting any references affected by
 * this shift.
 *
 * @param ptgs parsed formula tokens; entries may be replaced by this call
 * @param currentExternSheetIx extern sheet index of the sheet holding the formula
 * @return whether at least one token was replaced
 */
public boolean adjustFormula(Ptg[] ptgs, int currentExternSheetIx) {
    boolean anyChange = false;
    for (int idx = 0; idx < ptgs.length; idx++) {
        Ptg adjusted = adjustPtg(ptgs[idx], currentExternSheetIx);
        if (adjusted == null) {
            continue; // token unaffected by the shift
        }
        ptgs[idx] = adjusted;
        anyChange = true;
    }
    return anyChange;
}
/**
 * Dispatches one token to the handler for this instance's shift mode.
 *
 * @return the replacement token, or {@code null} when no change is needed
 */
private Ptg adjustPtg(Ptg ptg, int currentExternSheetIx) {
    if (_mode == ShiftMode.RowMove) {
        return adjustPtgDueToRowMove(ptg, currentExternSheetIx);
    }
    if (_mode == ShiftMode.RowCopy) {
        // Covers row copy on the same sheet as well as between sheets of one workbook.
        return adjustPtgDueToRowCopy(ptg);
    }
    if (_mode == ShiftMode.ColumnMove) {
        return adjustPtgDueToColumnMove(ptg, currentExternSheetIx);
    }
    if (_mode == ShiftMode.ColumnCopy) {
        return adjustPtgDueToColumnCopy(ptg);
    }
    if (_mode == ShiftMode.SheetMove) {
        return adjustPtgDueToSheetMove(ptg);
    }
    throw new IllegalStateException("Unsupported shift mode: " + _mode);
}
/**
 * Adjusts a single reference/area token for a row or column move.
 *
 * @param ptg token to examine
 * @param currentExternSheetIx extern sheet index of the formula's own sheet
 * @param isRowMove true for a row move, false for a column move
 * @return in-place modified ptg (if column move would cause Ptg to change),
 * deleted ref ptg (if column move causes an error),
 * or null (if no Ptg change is needed)
 */
private Ptg adjustPtgDueToMove(Ptg ptg, int currentExternSheetIx, boolean isRowMove) {
    if(ptg instanceof RefPtg) {
        if (currentExternSheetIx != _externSheetIndex) {
            // local refs on other sheets are unaffected
            return null;
        }
        RefPtg rptg = (RefPtg)ptg;
        return isRowMove ? rowMoveRefPtg(rptg) : columnMoveRefPtg(rptg);
    }
    if(ptg instanceof Ref3DPtg) {
        Ref3DPtg rptg = (Ref3DPtg)ptg;
        if (_externSheetIndex != rptg.getExternSheetIndex()) {
            // only move 3D refs that refer to the sheet with cells being moved
            // (currentExternSheetIx is irrelevant)
            return null;
        }
        return isRowMove ? rowMoveRefPtg(rptg) : columnMoveRefPtg(rptg);
    }
    if(ptg instanceof Ref3DPxg) {
        Ref3DPxg rpxg = (Ref3DPxg)ptg;
        // External-workbook refs are never adjusted; sheet match is by name (XSSF style).
        if (rpxg.getExternalWorkbookNumber() > 0 ||
                ! _sheetName.equalsIgnoreCase(rpxg.getSheetName())) {
            // only move 3D refs that refer to the sheet with cells being moved
            return null;
        }
        return isRowMove ? rowMoveRefPtg(rpxg) : columnMoveRefPtg(rpxg);
    }
    if(ptg instanceof Area2DPtgBase) {
        if (currentExternSheetIx != _externSheetIndex) {
            // local refs on other sheets are unaffected
            // NOTE(review): returns ptg here (which callers count as "changed"), while the
            // analogous RefPtg branch returns null — looks inconsistent; confirm upstream
            // before relying on adjustFormula's boolean result for area refs.
            return ptg;
        }
        Area2DPtgBase aptg = (Area2DPtgBase) ptg;
        return isRowMove ? rowMoveAreaPtg(aptg) : columnMoveAreaPtg(aptg);
    }
    if(ptg instanceof Area3DPtg) {
        Area3DPtg aptg = (Area3DPtg)ptg;
        if (_externSheetIndex != aptg.getExternSheetIndex()) {
            // only move 3D refs that refer to the sheet with cells being moved
            // (currentExternSheetIx is irrelevant)
            return null;
        }
        return isRowMove ? rowMoveAreaPtg(aptg) : columnMoveAreaPtg(aptg);
    }
    if(ptg instanceof Area3DPxg) {
        Area3DPxg apxg = (Area3DPxg)ptg;
        if (apxg.getExternalWorkbookNumber() > 0 ||
                ! _sheetName.equalsIgnoreCase(apxg.getSheetName())) {
            // only move 3D refs that refer to the sheet with cells being moved
            return null;
        }
        return isRowMove ? rowMoveAreaPtg(apxg) : columnMoveAreaPtg(apxg);
    }
    // Any other token type (operators, constants, errors, ...) is unaffected.
    return null;
}
/**
 * Row-move specialization of {@code adjustPtgDueToMove}.
 *
 * @return in-place modified ptg (if row move would cause Ptg to change),
 * deleted ref ptg (if row move causes an error),
 * or null (if no Ptg change is needed)
 */
private Ptg adjustPtgDueToRowMove(Ptg ptg, int currentExternSheetIx) {
    return adjustPtgDueToMove(ptg, currentExternSheetIx, true);
}
/**
 * Column-move specialization of {@code adjustPtgDueToMove}.
 *
 * @return in-place modified ptg (if column move would cause Ptg to change),
 * deleted ref ptg (if column move causes an error),
 * or null (if no Ptg change is needed)
 */
private Ptg adjustPtgDueToColumnMove(Ptg ptg, int currentExternSheetIx) {
    return adjustPtgDueToMove(ptg, currentExternSheetIx, false);
}
/**
 * Call this on any ptg reference contained in a row or column of cells that was copied.
 * If the ptg reference is relative, the references will be shifted by the distance
 * that the rows or columns were copied.
 *
 * @param ptg the ptg to shift
 * @param isRowCopy true for a row copy, false for a column copy
 * @return deleted ref ptg, in-place modified ptg, or null
 * If Ptg would be shifted off the first or last row or columns of a sheet, return deleted ref
 * If Ptg needs to be changed, modifies Ptg in-place
 * If Ptg doesn't need to be changed, returns <code>null</code>
 */
private Ptg adjustPtgDueToCopy(Ptg ptg, boolean isRowCopy) {
    // Unlike the move path, copies adjust refs regardless of which sheet they point at.
    if(ptg instanceof RefPtg) {
        RefPtg rptg = (RefPtg)ptg;
        return isRowCopy ? rowCopyRefPtg(rptg) : columnCopyRefPtg(rptg);
    }
    if(ptg instanceof Ref3DPtg) {
        Ref3DPtg rptg = (Ref3DPtg)ptg;
        return isRowCopy ? rowCopyRefPtg(rptg) : columnCopyRefPtg(rptg);
    }
    if(ptg instanceof Ref3DPxg) {
        Ref3DPxg rpxg = (Ref3DPxg)ptg;
        return isRowCopy ? rowCopyRefPtg(rpxg) : columnCopyRefPtg(rpxg);
    }
    if(ptg instanceof Area2DPtgBase) {
        Area2DPtgBase aptg = (Area2DPtgBase) ptg;
        return isRowCopy ? rowCopyAreaPtg(aptg) : columnCopyAreaPtg(aptg);
    }
    if(ptg instanceof Area3DPtg) {
        Area3DPtg aptg = (Area3DPtg)ptg;
        return isRowCopy ? rowCopyAreaPtg(aptg) : columnCopyAreaPtg(aptg);
    }
    if(ptg instanceof Area3DPxg) {
        Area3DPxg apxg = (Area3DPxg)ptg;
        return isRowCopy ? rowCopyAreaPtg(apxg) : columnCopyAreaPtg(apxg);
    }
    // Non-reference tokens never need adjustment.
    return null;
}
/**
 * Call this on any ptg reference contained in a row of cells that was copied.
 * If the ptg reference is relative, the references will be shifted by the distance
 * that the rows were copied.
 *
 * @param ptg the ptg to shift
 * @return deleted ref ptg, in-place modified ptg, or null
 * If Ptg would be shifted off the first or last row of a sheet, return deleted ref
 * If Ptg needs to be changed, modifies Ptg in-place
 * If Ptg doesn't need to be changed, returns <code>null</code>
 */
private Ptg adjustPtgDueToRowCopy(Ptg ptg) {
    return adjustPtgDueToCopy(ptg, true);
}
/**
 * Call this on any ptg reference contained in a column of cells that was copied.
 * If the ptg reference is relative, the references will be shifted by the distance
 * that the columns were copied.
 *
 * @param ptg the ptg to shift
 * @return deleted ref ptg, in-place modified ptg, or null
 * If Ptg would be shifted off the first or last column of a sheet, return deleted ref
 * If Ptg needs to be changed, modifies Ptg in-place
 * If Ptg doesn't need to be changed, returns <code>null</code>
 */
private Ptg adjustPtgDueToColumnCopy(Ptg ptg) {
    return adjustPtgDueToCopy(ptg, false);
}
/**
 * Fixes up a 3D reference after a sheet has been reordered within the workbook.
 *
 * @return the modified ptg, or {@code null} when the token is unaffected
 */
private Ptg adjustPtgDueToSheetMove(Ptg ptg) {
    if (!(ptg instanceof Ref3DPtg)) {
        return null;
    }
    Ref3DPtg ref3d = (Ref3DPtg) ptg;
    int sheetIndex = ref3d.getExternSheetIndex();
    int lo = Math.min(_srcSheetIndex, _dstSheetIndex);
    int hi = Math.max(_srcSheetIndex, _dstSheetIndex);
    // Case 1: the referenced sheet sits entirely outside the affected span.
    if (sheetIndex < lo || sheetIndex > hi) {
        return null;
    }
    // Case 2: the reference points at the sheet that was moved.
    if (sheetIndex == _srcSheetIndex) {
        ref3d.setExternSheetIndex(_dstSheetIndex);
        return ref3d;
    }
    // Cases 3/4: a sheet strictly between source and destination slides one
    // position toward the slot the moved sheet vacated.
    if (_dstSheetIndex < _srcSheetIndex) {
        ref3d.setExternSheetIndex(sheetIndex + 1);
    } else {
        ref3d.setExternSheetIndex(sheetIndex - 1);
    }
    return ref3d;
}
/**
 * Adjusts a single-cell reference for a row move.
 *
 * @return the modified ptg, a deleted-ref ptg when the destination rows land on
 *         the referenced cell, or {@code null} when the ref is unaffected
 */
private Ptg rowMoveRefPtg(RefPtgBase rptg) {
    int refRow = rptg.getRow();
    if (_firstMovedIndex <= refRow && refRow <= _lastMovedIndex) {
        // Rows being moved completely enclose the ref.
        // - move the area ref along with the rows regardless of destination
        rptg.setRow(refRow + _amountToMove);
        return rptg;
    }
    // else rules for adjusting area may also depend on the destination of the moved rows
    int destFirstRowIndex = _firstMovedIndex + _amountToMove;
    int destLastRowIndex = _lastMovedIndex + _amountToMove;
    // ref is outside source rows
    // check for clashes with destination
    if (destLastRowIndex < refRow || refRow < destFirstRowIndex) {
        // destination rows are completely outside ref
        return null;
    }
    if (destFirstRowIndex <= refRow && refRow <= destLastRowIndex) {
        // destination rows enclose the area (possibly exactly)
        return createDeletedRef(rptg);
    }
    // Defensive only: the two guards above are logical complements, so this throw
    // is unreachable. NOTE(review): the message prints refRow twice; the last two
    // values were presumably meant to be destFirstRowIndex/destLastRowIndex — confirm.
    throw new IllegalStateException("Situation not covered: (" + _firstMovedIndex + ", " +
            _lastMovedIndex + ", " + _amountToMove + ", " + refRow + ", " + refRow + ")");
}
/**
 * Adjusts an area reference for a block row move. The case analysis depends on
 * how the source rows and their destination overlap the area's first/last rows:
 * the area may be shifted whole, expanded, truncated, deleted, or left alone.
 *
 * @param aptg the area to adjust (modified in place when it changes)
 * @return the modified ptg, a deleted-area ptg when the destination rows
 *         completely overwrite the area, or {@code null} when nothing changes
 */
private Ptg rowMoveAreaPtg(AreaPtgBase aptg) {
    int aFirstRow = aptg.getFirstRow();
    int aLastRow = aptg.getLastRow();
    if (_firstMovedIndex <= aFirstRow && aLastRow <= _lastMovedIndex) {
        // Rows being moved completely enclose the area ref.
        // - move the area ref along with the rows regardless of destination
        aptg.setFirstRow(aFirstRow + _amountToMove);
        aptg.setLastRow(aLastRow + _amountToMove);
        return aptg;
    }
    // else rules for adjusting area may also depend on the destination of the moved rows
    int destFirstRowIndex = _firstMovedIndex + _amountToMove;
    int destLastRowIndex = _lastMovedIndex + _amountToMove;
    if (aFirstRow < _firstMovedIndex && _lastMovedIndex < aLastRow) {
        // Rows moved were originally *completely* within the area ref
        // If the destination of the rows overlaps either the top
        // or bottom of the area ref there will be a change
        if (destFirstRowIndex < aFirstRow && aFirstRow <= destLastRowIndex) {
            // truncate the top of the area by the moved rows
            aptg.setFirstRow(destLastRowIndex+1);
            return aptg;
        } else if (destFirstRowIndex <= aLastRow && aLastRow < destLastRowIndex) {
            // truncate the bottom of the area by the moved rows
            aptg.setLastRow(destFirstRowIndex-1);
            return aptg;
        }
        // else - rows have moved completely outside the area ref,
        // or still remain completely within the area ref
        return null; // - no change to the area
    }
    if (_firstMovedIndex <= aFirstRow && aFirstRow <= _lastMovedIndex) {
        // Rows moved include the first row of the area ref, but not the last row
        // btw: (aLastRow > _lastMovedIndex)
        if (_amountToMove < 0) {
            // simple case - expand area by shifting top upward
            aptg.setFirstRow(aFirstRow + _amountToMove);
            return aptg;
        }
        if (destFirstRowIndex > aLastRow) {
            // in this case, excel ignores the row move
            return null;
        }
        int newFirstRowIx = aFirstRow + _amountToMove;
        if (destLastRowIndex < aLastRow) {
            // end of area is preserved (will remain exact same row)
            // the top area row is moved simply
            aptg.setFirstRow(newFirstRowIx);
            return aptg;
        }
        // else - bottom area row has been replaced - both area top and bottom may move now
        int areaRemainingTopRowIx = _lastMovedIndex + 1;
        if (destFirstRowIndex > areaRemainingTopRowIx) {
            // old top row of area has moved deep within the area, and exposed a new top row
            newFirstRowIx = areaRemainingTopRowIx;
        }
        aptg.setFirstRow(newFirstRowIx);
        // bottom of the area extends to cover wherever the moved rows landed
        aptg.setLastRow(Math.max(aLastRow, destLastRowIndex));
        return aptg;
    }
    if (_firstMovedIndex <= aLastRow && aLastRow <= _lastMovedIndex) {
        // Rows moved include the last row of the area ref, but not the first
        // btw: (aFirstRow < _firstMovedIndex)
        if (_amountToMove > 0) {
            // simple case - expand area by shifting bottom downward
            aptg.setLastRow(aLastRow + _amountToMove);
            return aptg;
        }
        if (destLastRowIndex < aFirstRow) {
            // in this case, excel ignores the row move
            return null;
        }
        int newLastRowIx = aLastRow + _amountToMove;
        if (destFirstRowIndex > aFirstRow) {
            // top of area is preserved (will remain exact same row)
            // the bottom area row is moved simply
            aptg.setLastRow(newLastRowIx);
            return aptg;
        }
        // else - top area row has been replaced - both area top and bottom may move now
        int areaRemainingBottomRowIx = _firstMovedIndex - 1;
        if (destLastRowIndex < areaRemainingBottomRowIx) {
            // old bottom row of area has moved up deep within the area, and exposed a new bottom row
            newLastRowIx = areaRemainingBottomRowIx;
        }
        // top of the area extends to cover wherever the moved rows landed
        aptg.setFirstRow(Math.min(aFirstRow, destFirstRowIndex));
        aptg.setLastRow(newLastRowIx);
        return aptg;
    }
    // else source rows include none of the rows of the area ref
    // check for clashes with destination
    if (destLastRowIndex < aFirstRow || aLastRow < destFirstRowIndex) {
        // destination rows are completely outside area ref
        return null;
    }
    if (destFirstRowIndex <= aFirstRow && aLastRow <= destLastRowIndex) {
        // destination rows enclose the area (possibly exactly)
        return createDeletedRef(aptg);
    }
    if (aFirstRow <= destFirstRowIndex && destLastRowIndex <= aLastRow) {
        // destination rows are within area ref (possibly exact on top or bottom, but not both)
        return null; // - no change to area
    }
    if (destFirstRowIndex < aFirstRow && aFirstRow <= destLastRowIndex) {
        // dest rows overlap top of area
        // - truncate the top
        aptg.setFirstRow(destLastRowIndex+1);
        return aptg;
    }
    if (destFirstRowIndex <= aLastRow && aLastRow < destLastRowIndex) {
        // dest rows overlap bottom of area
        // - truncate the bottom
        aptg.setLastRow(destFirstRowIndex-1);
        return aptg;
    }
    throw new IllegalStateException("Situation not covered: (" + _firstMovedIndex + ", " +
        _lastMovedIndex + ", " + _amountToMove + ", " + aFirstRow + ", " + aLastRow + ")");
}
/**
 * Adjusts a single-cell reference for a row copy operation.
 * Absolute row references never change on copy; relative ones are shifted by
 * the copy distance, and become deleted refs when either the copied cell or
 * the shifted target falls off the sheet.
 *
 * @param rptg The REF that is copied (modified in place when it changes)
 * @return the modified ptg, {@link RefErrorPtg} when the ref becomes invalid,
 *         or {@code null} if no change was made
 */
private Ptg rowCopyRefPtg(RefPtgBase rptg) {
    if (!rptg.isRowRelative()) {
        return null;
    }
    final int lastValidRow = _version.getLastRowIndex();

    // check new location where the ref is located
    final int destRowIndex = _firstMovedIndex + _amountToMove;
    if (destRowIndex < 0 || destRowIndex > lastValidRow) {
        return createDeletedRef(rptg);
    }

    // check new location where the ref points to
    final int newRowIndex = rptg.getRow() + _amountToMove;
    if (newRowIndex < 0 || newRowIndex > lastValidRow) {
        return createDeletedRef(rptg);
    }

    rptg.setRow(newRowIndex);
    return rptg;
}
/**
 * Adjusts an Area reference for a row copy operation: each relative edge
 * (first/last row) is shifted by the copy distance, and the whole area becomes
 * a deleted ref if a shifted edge falls off the sheet.
 *
 * @param aptg The Area that is copied (modified in place when it changes)
 * @return null when neither edge is relative, {@link AreaErrPtg} when an edge
 *         becomes invalid, or the modified aptg
 */
private Ptg rowCopyAreaPtg(AreaPtgBase aptg) {
    boolean modified = false;

    if (aptg.isFirstRowRelative()) {
        final int shiftedFirst = aptg.getFirstRow() + _amountToMove;
        if (shiftedFirst < 0 || _version.getLastRowIndex() < shiftedFirst) {
            return createDeletedRef(aptg);
        }
        aptg.setFirstRow(shiftedFirst);
        modified = true;
    }

    if (aptg.isLastRowRelative()) {
        final int shiftedLast = aptg.getLastRow() + _amountToMove;
        if (shiftedLast < 0 || _version.getLastRowIndex() < shiftedLast) {
            return createDeletedRef(aptg);
        }
        aptg.setLastRow(shiftedLast);
        modified = true;
    }

    if (!modified) {
        return null;
    }
    // A mixed relative/absolute area may now have its edges inverted.
    aptg.sortTopLeftToBottomRight();
    return aptg;
}
/**
 * Adjusts a single-cell reference for a block column move.
 *
 * @param rptg the reference to adjust (modified in place when it changes)
 * @return the modified ptg, a deleted-ref ptg when the destination columns
 *         overwrite the referenced cell, or {@code null} when nothing changes
 */
private Ptg columnMoveRefPtg(RefPtgBase rptg) {
    final int refColumn = rptg.getColumn();

    // The moved columns contain the referenced cell: the reference travels with them.
    if (refColumn >= _firstMovedIndex && refColumn <= _lastMovedIndex) {
        rptg.setColumn(refColumn + _amountToMove);
        return rptg;
    }

    // The reference lies outside the source columns; only the destination matters now.
    final int destFirstColumnIndex = _firstMovedIndex + _amountToMove;
    final int destLastColumnIndex = _lastMovedIndex + _amountToMove;

    // Destination columns do not touch the referenced column: nothing changes.
    if (refColumn > destLastColumnIndex || refColumn < destFirstColumnIndex) {
        return null;
    }

    // The destination columns land on top of the referenced cell: it is overwritten.
    if (refColumn >= destFirstColumnIndex && refColumn <= destLastColumnIndex) {
        return createDeletedRef(rptg);
    }

    throw new IllegalStateException("Situation not covered: (" + _firstMovedIndex + ", " +
        _lastMovedIndex + ", " + _amountToMove + ", " + refColumn + ", " + refColumn + ")");
}
/**
 * Adjusts an area reference for a block column move. Mirror image of the row
 * version: depending on how the source columns and their destination overlap
 * the area's first/last columns, the area may be shifted whole, expanded,
 * truncated, deleted, or left alone.
 *
 * @param aptg the area to adjust (modified in place when it changes)
 * @return the modified ptg, a deleted-area ptg when the destination columns
 *         completely overwrite the area, or {@code null} when nothing changes
 */
private Ptg columnMoveAreaPtg(AreaPtgBase aptg) {
    int aFirstColumn = aptg.getFirstColumn();
    int aLastColumn = aptg.getLastColumn();
    if (_firstMovedIndex <= aFirstColumn && aLastColumn <= _lastMovedIndex) {
        // Columns being moved completely enclose the area ref.
        // - move the area ref along with the columns regardless of destination
        aptg.setFirstColumn(aFirstColumn + _amountToMove);
        aptg.setLastColumn(aLastColumn + _amountToMove);
        return aptg;
    }
    // else rules for adjusting area may also depend on the destination of the moved columns
    int destFirstColumnIndex = _firstMovedIndex + _amountToMove;
    int destLastColumnIndex = _lastMovedIndex + _amountToMove;
    if (aFirstColumn < _firstMovedIndex && _lastMovedIndex < aLastColumn) {
        // Columns moved were originally *completely* within the area ref
        // If the destination of the columns overlaps either the top
        // or bottom of the area ref there will be a change
        if (destFirstColumnIndex < aFirstColumn && aFirstColumn <= destLastColumnIndex) {
            // truncate the top of the area by the moved columns
            aptg.setFirstColumn(destLastColumnIndex+1);
            return aptg;
        } else if (destFirstColumnIndex <= aLastColumn && aLastColumn < destLastColumnIndex) {
            // truncate the bottom of the area by the moved columns
            aptg.setLastColumn(destFirstColumnIndex-1);
            return aptg;
        }
        // else - columns have moved completely outside the area ref,
        // or still remain completely within the area ref
        return null; // - no change to the area
    }
    if (_firstMovedIndex <= aFirstColumn && aFirstColumn <= _lastMovedIndex) {
        // Columns moved include the first column of the area ref, but not the last column
        // btw: (aLastColumn > _lastMovedIndex)
        if (_amountToMove < 0) {
            // simple case - expand area by shifting top upward
            aptg.setFirstColumn(aFirstColumn + _amountToMove);
            return aptg;
        }
        if (destFirstColumnIndex > aLastColumn) {
            // in this case, excel ignores the column move
            return null;
        }
        int newFirstColumnIx = aFirstColumn + _amountToMove;
        if (destLastColumnIndex < aLastColumn) {
            // end of area is preserved (will remain exact same column)
            // the top area column is moved simply
            aptg.setFirstColumn(newFirstColumnIx);
            return aptg;
        }
        // else - bottom area column has been replaced - both area top and bottom may move now
        int areaRemainingTopColumnIx = _lastMovedIndex + 1;
        if (destFirstColumnIndex > areaRemainingTopColumnIx) {
            // old top column of area has moved deep within the area, and exposed a new top column
            newFirstColumnIx = areaRemainingTopColumnIx;
        }
        aptg.setFirstColumn(newFirstColumnIx);
        // end of the area extends to cover wherever the moved columns landed
        aptg.setLastColumn(Math.max(aLastColumn, destLastColumnIndex));
        return aptg;
    }
    if (_firstMovedIndex <= aLastColumn && aLastColumn <= _lastMovedIndex) {
        // Columns moved include the last column of the area ref, but not the first
        // btw: (aFirstColumn < _firstMovedIndex)
        if (_amountToMove > 0) {
            // simple case - expand area by shifting bottom downward
            aptg.setLastColumn(aLastColumn + _amountToMove);
            return aptg;
        }
        if (destLastColumnIndex < aFirstColumn) {
            // in this case, excel ignores the column move
            return null;
        }
        int newLastColumnIx = aLastColumn + _amountToMove;
        if (destFirstColumnIndex > aFirstColumn) {
            // top of area is preserved (will remain exact same column)
            // the bottom area column is moved simply
            aptg.setLastColumn(newLastColumnIx);
            return aptg;
        }
        // else - top area column has been replaced - both area top and bottom may move now
        int areaRemainingBottomColumnIx = _firstMovedIndex - 1;
        if (destLastColumnIndex < areaRemainingBottomColumnIx) {
            // old bottom column of area has moved up deep within the area, and exposed a new bottom column
            newLastColumnIx = areaRemainingBottomColumnIx;
        }
        // start of the area extends to cover wherever the moved columns landed
        aptg.setFirstColumn(Math.min(aFirstColumn, destFirstColumnIndex));
        aptg.setLastColumn(newLastColumnIx);
        return aptg;
    }
    // else source columns include none of the columns of the area ref
    // check for clashes with destination
    if (destLastColumnIndex < aFirstColumn || aLastColumn < destFirstColumnIndex) {
        // destination columns are completely outside area ref
        return null;
    }
    if (destFirstColumnIndex <= aFirstColumn && aLastColumn <= destLastColumnIndex) {
        // destination columns enclose the area (possibly exactly)
        return createDeletedRef(aptg);
    }
    if (aFirstColumn <= destFirstColumnIndex && destLastColumnIndex <= aLastColumn) {
        // destination columns are within area ref (possibly exact on top or bottom, but not both)
        return null; // - no change to area
    }
    if (destFirstColumnIndex < aFirstColumn && aFirstColumn <= destLastColumnIndex) {
        // dest columns overlap top of area
        // - truncate the top
        aptg.setFirstColumn(destLastColumnIndex+1);
        return aptg;
    }
    if (destFirstColumnIndex <= aLastColumn && aLastColumn < destLastColumnIndex) {
        // dest columns overlap bottom of area
        // - truncate the bottom
        aptg.setLastColumn(destFirstColumnIndex-1);
        return aptg;
    }
    throw new IllegalStateException("Situation not covered: (" + _firstMovedIndex + ", " +
        _lastMovedIndex + ", " + _amountToMove + ", " + aFirstColumn + ", " + aLastColumn + ")");
}
/**
 * Adjusts a single-cell reference for a column copy operation.
 * Absolute column references never change on copy; relative ones are shifted
 * by the copy distance, and become deleted refs when either the copied cell or
 * the shifted target falls off the sheet.
 *
 * @param rptg The REF that is copied (modified in place when it changes)
 * @return the modified ptg, {@link RefErrorPtg} when the ref becomes invalid,
 *         or {@code null} if no change was made
 */
private Ptg columnCopyRefPtg(RefPtgBase rptg) {
    if (!rptg.isColRelative()) {
        return null;
    }
    final int lastValidColumn = _version.getLastColumnIndex();

    // check new location where the ref is located
    final int destColumnIndex = _firstMovedIndex + _amountToMove;
    if (destColumnIndex < 0 || destColumnIndex > lastValidColumn) {
        return createDeletedRef(rptg);
    }

    // check new location where the ref points to
    final int newColumnIndex = rptg.getColumn() + _amountToMove;
    if (newColumnIndex < 0 || newColumnIndex > lastValidColumn) {
        return createDeletedRef(rptg);
    }

    rptg.setColumn(newColumnIndex);
    return rptg;
}
/**
 * Adjusts an Area reference for a column copy operation: each relative edge
 * (first/last column) is shifted by the copy distance, and the whole area
 * becomes a deleted ref if a shifted edge falls off the sheet.
 *
 * @param aptg The Area that is copied (modified in place when it changes)
 * @return null when neither edge is relative, {@link AreaErrPtg} when an edge
 *         becomes invalid, or the modified aptg
 */
private Ptg columnCopyAreaPtg(AreaPtgBase aptg) {
    boolean modified = false;

    if (aptg.isFirstColRelative()) {
        final int shiftedFirst = aptg.getFirstColumn() + _amountToMove;
        if (shiftedFirst < 0 || _version.getLastColumnIndex() < shiftedFirst) {
            return createDeletedRef(aptg);
        }
        aptg.setFirstColumn(shiftedFirst);
        modified = true;
    }

    if (aptg.isLastColRelative()) {
        final int shiftedLast = aptg.getLastColumn() + _amountToMove;
        if (shiftedLast < 0 || _version.getLastColumnIndex() < shiftedLast) {
            return createDeletedRef(aptg);
        }
        aptg.setLastColumn(shiftedLast);
        modified = true;
    }

    if (!modified) {
        return null;
    }
    // A mixed relative/absolute area may now have its edges inverted.
    aptg.sortTopLeftToBottomRight();
    return aptg;
}
/**
 * Maps a concrete reference token onto its "deleted reference" counterpart,
 * used when a shift or copy pushes the reference off the sheet or the
 * destination rows/columns overwrite it.
 *
 * @param ptg the reference token being invalidated
 * @return the matching error/deleted token for {@code ptg}'s concrete type
 * @throws IllegalArgumentException if {@code ptg} is not a recognized ref type
 */
private static Ptg createDeletedRef(Ptg ptg) {
    if (ptg instanceof RefPtg) {
        return new RefErrorPtg();
    }
    if (ptg instanceof Ref3DPtg) {
        return new DeletedRef3DPtg(((Ref3DPtg) ptg).getExternSheetIndex());
    }
    if (ptg instanceof AreaPtg) {
        return new AreaErrPtg();
    }
    if (ptg instanceof Area3DPtg) {
        return new DeletedArea3DPtg(((Area3DPtg) ptg).getExternSheetIndex());
    }
    if (ptg instanceof Ref3DPxg) {
        Ref3DPxg refPxg = (Ref3DPxg) ptg;
        return new Deleted3DPxg(refPxg.getExternalWorkbookNumber(), refPxg.getSheetName());
    }
    if (ptg instanceof Area3DPxg) {
        Area3DPxg areaPxg = (Area3DPxg) ptg;
        return new Deleted3DPxg(areaPxg.getExternalWorkbookNumber(), areaPxg.getSheetName());
    }

    throw new IllegalArgumentException("Unexpected ref ptg class (" + ptg.getClass().getName() + ")");
}
} |
openjdk/jdk8 | 35,670 | jaxp/src/com/sun/org/apache/xpath/internal/NodeSet.java | /*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: NodeSet.java,v 1.2.4.1 2005/09/10 17:39:49 jeffsuttor Exp $
*/
package com.sun.org.apache.xpath.internal;
import com.sun.org.apache.xalan.internal.res.XSLMessages;
import com.sun.org.apache.xml.internal.utils.DOM2Helper;
import com.sun.org.apache.xpath.internal.axes.ContextNodeList;
import com.sun.org.apache.xpath.internal.res.XPATHErrorResources;
import org.w3c.dom.DOMException;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.traversal.NodeFilter;
import org.w3c.dom.traversal.NodeIterator;
/**
* <p>The NodeSet class can act as either a NodeVector,
* NodeList, or NodeIterator. However, in order for it to
* act as a NodeVector or NodeList, it's required that
* setShouldCacheNodes(true) be called before the first
* nextNode() is called, in order that nodes can be added
* as they are fetched. Derived classes that implement iterators
* must override runTo(int index), in order that they may
* run the iteration to the given index. </p>
*
* <p>Note that we directly implement the DOM's NodeIterator
* interface. We do not emulate all the behavior of the
* standard NodeIterator. In particular, we do not guarantee
* to present a "live view" of the document ... but in XSLT,
* the source document should never be mutated, so this should
* never be an issue.</p>
*
* <p>Thought: Should NodeSet really implement NodeList and NodeIterator,
* or should there be specific subclasses of it which do so? The
* advantage of doing it all here is that all NodeSets will respond
* to the same calls; the disadvantage is that some of them may return
* less-than-enlightening results when you do so.</p>
* @xsl.usage advanced
*/
public class NodeSet
implements NodeList, NodeIterator, Cloneable, ContextNodeList
{
/**
 * Create an empty nodelist, using the default block size of 32.
 */
public NodeSet()
{
  // Same effect as assigning m_blocksize = 32 and m_mapSize = 0 directly.
  this(32);
}
/**
 * Create an empty NodeSet, using the given block size for the
 * backing storage when it is eventually allocated.
 *
 * @param blocksize Size of blocks to allocate
 */
public NodeSet(int blocksize)
{
  m_blocksize = blocksize;
  // m_mapSize == 0 signals that the backing map has not been allocated yet.
  m_mapSize = 0;
}
/**
 * Create a NodeSet, and copy the members of the
 * given nodelist into it (null entries are skipped by addNodes).
 *
 * @param nodelist List of Nodes to be made members of the new set.
 */
public NodeSet(NodeList nodelist)
{
  this(32);
  addNodes(nodelist);
}
/**
 * Create a NodeSet, and copy the members of the
 * given NodeSet into it.
 *
 * @param nodelist Set of Nodes to be made members of the new set.
 */
public NodeSet(NodeSet nodelist)
{
  this(32);
  // A NodeSet is itself a NodeIterator; the cast disambiguates the overload.
  addNodes((NodeIterator) nodelist);
}
/**
 * Create a NodeSet, and copy the members of the
 * given NodeIterator into it. The iterator is consumed.
 *
 * @param ni Iterator which yields Nodes to be made members of the new set.
 */
public NodeSet(NodeIterator ni)
{
  this(32);
  addNodes(ni);
}
/**
 * Create a NodeSet which contains only the given Node.
 *
 * @param node Single node to be added to the new set.
 */
public NodeSet(Node node)
{
  this(32);
  addNode(node);
}
/**
 * @return The root node of the Iterator, as specified when it was created.
 * For non-Iterator NodeSets (this base class), this is always null.
 */
public Node getRoot()
{
  return null;
}
/**
 * Get a cloned Iterator whose iteration state has been rewound to the
 * beginning of the set. The receiver itself is not modified.
 *
 * @return a new NodeSet of the same type and contents, with its
 *         iteration position reset to the start.
 *
 * @throws CloneNotSupportedException if this subclass of NodeSet
 *         does not support the clone() operation.
 */
public NodeIterator cloneWithReset() throws CloneNotSupportedException
{
  NodeSet copy = (NodeSet) clone();

  copy.reset();

  return copy;
}
/**
 * Reset the iterator position to the start of the set.
 * May have no effect on non-iterator Nodesets.
 */
public void reset()
{
  // m_next is the index of the node the next nextNode() call will return.
  m_next = 0;
}
/**
 * This attribute determines which node types are presented via the
 * iterator. The available set of constants is defined in the
 * <code>NodeFilter</code> interface. For NodeSets, the mask has been
 * hardcoded to show all nodes except EntityReference nodes, which have
 * no equivalent in the XPath data model.
 *
 * @return integer used as a bit-array, containing flags defined in
 * the DOM's NodeFilter class. The value will be
 * <code>SHOW_ALL &amp; ~SHOW_ENTITY_REFERENCE</code>, meaning that
 * only entity references are suppressed.
 */
public int getWhatToShow()
{
  return NodeFilter.SHOW_ALL & ~NodeFilter.SHOW_ENTITY_REFERENCE;
}
/**
 * The filter object used to screen nodes. Filters are applied to
 * further reduce (and restructure) the NodeIterator's view of the
 * document. In our case, we will be using hardcoded filters built
 * into our iterators... but getFilter() is part of the DOM's
 * NodeIterator interface, so we have to support it.
 *
 * @return null, which is slightly misleading. True, there is no
 * user-written filter object, but in fact we are doing some very
 * sophisticated custom filtering. A DOM purist might suggest
 * returning a placeholder object just to indicate that this is
 * not going to return all nodes selected by whatToShow.
 */
public NodeFilter getFilter()
{
  return null;
}
/**
 * The value of this flag determines whether the children of entity
 * reference nodes are visible to the iterator. If false, they will be
 * skipped over.
 * <br> To produce a view of the document that has entity references
 * expanded and does not expose the entity reference node itself, use the
 * whatToShow flags to hide the entity reference node and set
 * expandEntityReferences to true when creating the iterator. To produce
 * a view of the document that has entity reference nodes but no entity
 * expansion, use the whatToShow flags to show the entity reference node
 * and set expandEntityReferences to false.
 *
 * @return true for all iterators based on NodeSet, meaning that the
 * contents of EntityReference nodes may be returned (though whatToShow
 * says that the EntityReferences themselves are not shown.)
 */
public boolean getExpandEntityReferences()
{
  return true;
}
/**
 * Returns the next node in the set and advances the position of the
 * iterator in the set. After a NodeIterator is created, the first call
 * to nextNode() returns the first node in the set.
 *
 * @return The next <code>Node</code> in the set being iterated over, or
 * <code>null</code> if there are no more members in that set.
 * @throws DOMException
 *    INVALID_STATE_ERR: Raised if this method is called after the
 *    <code>detach</code> method was invoked.
 */
public Node nextNode() throws DOMException
{
  // Guard clause: position already past the last cached node.
  if (m_next >= this.size())
    return null;

  Node result = this.elementAt(m_next);

  m_next++;

  return result;
}
/**
 * Returns the previous node in the set and moves the position of the
 * iterator backwards in the set.
 *
 * @return The previous <code>Node</code> in the set being iterated over,
 * or <code>null</code> if the iterator is already at the start of the set.
 * @throws DOMException
 *    INVALID_STATE_ERR: Raised if this method is called after the
 *    <code>detach</code> method was invoked.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a cached type, and hence doesn't know what the previous node was.
 */
public Node previousNode() throws DOMException
{

  if (!m_cacheNodes)
    throw new RuntimeException(
      XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_ITERATE, null)); //"This NodeSet can not iterate to a previous node!");

  // BUG FIX: this test was previously ((m_next - 1) > 0), which made it
  // impossible to step back to the FIRST node: after one nextNode() call
  // m_next == 1, and previousNode() incorrectly returned null. DOM Traversal
  // semantics require nextNode() followed by previousNode() to return the
  // same node, including for the first element.
  if ((m_next - 1) >= 0)
  {
    m_next--;

    return this.elementAt(m_next);
  }
  else
    return null;
}
/**
 * Detaches the iterator from the set which it iterated over, releasing
 * any computational resources and placing the iterator in the INVALID
 * state. After <code>detach</code> has been invoked, calls to
 * <code>nextNode</code> or <code>previousNode</code> will raise the
 * exception INVALID_STATE_ERR.
 * <p>
 * This operation is deliberately a no-op in NodeSet, and will not cause
 * INVALID_STATE_ERR to be raised by later operations.
 * </p>
 */
public void detach(){}
/**
 * Tells if this NodeSet is "fresh", in other words, if
 * the first nextNode() that is called will return the
 * first node in the set.
 *
 * @return true if nextNode() would return the first node in the set,
 * false if it would return a later one.
 */
public boolean isFresh()
{
  return (m_next == 0);
}
/**
 * If an index is requested, NodeSet will call this method
 * to run the iterator to the index. By default this sets
 * m_next to the index. If the index argument is -1, this
 * signals that the iterator should be run to the end.
 *
 * @param index Position to advance (or retreat) to, with
 * 0 requesting the reset ("fresh") position and -1 (or indeed
 * any out-of-bounds value) requesting the final position.
 * @throws RuntimeException thrown if this NodeSet is not
 * one of the types which supports indexing/counting.
 */
public void runTo(int index)
{

  if (!m_cacheNodes)
    throw new RuntimeException(
      XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_INDEX, null)); //"This NodeSet can not do indexing or counting functions!");

  // NOTE(review): the second operand tests m_next (the current position)
  // against m_firstFree; it looks like (index < m_firstFree) was intended,
  // so that only an in-range index is honored. Left as-is to preserve
  // long-standing behavior -- confirm before changing.
  if ((index >= 0) && (m_next < m_firstFree))
    m_next = index;
  else
    m_next = m_firstFree - 1;
}
/**
 * Returns the <code>index</code>th item in the collection. If
 * <code>index</code> is greater than or equal to the number of nodes in
 * the list, this returns <code>null</code> (from elementAt; see its
 * contract -- behavior for wildly out-of-range indices is inherited
 * from the underlying vector).
 *
 * @param index Index into the collection.
 * @return The node at the <code>index</code>th position in the
 * <code>NodeList</code>, or <code>null</code> if that is not a valid
 * index.
 */
public Node item(int index)
{

  // Ensure the iterator has materialized nodes at least up to this index.
  runTo(index);

  return (Node) this.elementAt(index);
}
/**
 * The number of nodes in the list. The range of valid child node indices is
 * 0 to <code>length-1</code> inclusive. Note that this operation requires
 * finding all the matching nodes, which may defeat attempts to defer
 * that work.
 *
 * @return integer indicating how many nodes are represented by this list.
 */
public int getLength()
{

  // Run the iteration to completion so size() reflects the full set.
  runTo(-1);

  return this.size();
}
/**
 * Add a node to the end of the NodeSet. Not all types of NodeSets support
 * this operation.
 *
 * @param n Node to be added
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNode(Node n)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  this.addElement(n);
}
/**
 * Insert a node at a given position, shifting later members down.
 *
 * @param n Node to be added
 * @param pos Offset at which the node is to be inserted,
 * with 0 being the first position.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void insertNode(Node n, int pos)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  insertElementAt(n, pos);
}
/**
 * Remove a node from the set.
 *
 * @param n Node to be removed
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void removeNode(Node n)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  this.removeElement(n);
}
/**
 * Append every non-null member of the given NodeList to this set, in the
 * order they appear in the list (no document-order sorting is performed).
 *
 * @param nodelist List of nodes which should now be referenced by
 * this NodeSet; a null list is tolerated and adds nothing.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodes(NodeList nodelist)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  // Defensive null check (fix for a bug that Sanjiva reported).
  if (null == nodelist)
    return;

  final int count = nodelist.getLength();

  for (int i = 0; i < count; i++)
  {
    Node candidate = nodelist.item(i);

    if (null != candidate)
      addElement(candidate);
  }
}
/**
 * <p>Copy NodeSet members into this nodelist, adding in
 * document order. Only genuine node references will be copied;
 * nulls appearing in the source NodeSet will
 * not be added to this one. </p>
 *
 * <p> In case you're wondering why this function is needed: NodeSet
 * implements both NodeIterator and NodeList. If this method isn't
 * provided, Java can't decide which of those to use when addNodes()
 * is invoked. Providing the more-explicit match avoids that
 * ambiguity.)</p>
 *
 * @param ns NodeSet whose members should be merged into this NodeSet.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodes(NodeSet ns)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  // Delegate to the NodeIterator overload; the cast selects it explicitly.
  addNodes((NodeIterator) ns);
}
/**
 * Drain the given iterator and append every node it yields to this set,
 * in iteration order. Null references terminate the iteration (per the
 * NodeIterator contract) and are not added.
 *
 * @param iterator NodeIterator which yields the nodes to be added;
 * a null iterator is tolerated and adds nothing.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodes(NodeIterator iterator)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  // Defensive null check (fix for a bug that Sanjiva reported).
  if (null == iterator)
    return;

  for (Node n = iterator.nextNode(); null != n; n = iterator.nextNode())
  {
    addElement(n);
  }
}
/**
 * Merge every non-null member of the given NodeList into this set,
 * inserting each at its document-order position.
 *
 * @param nodelist List of nodes to be added
 * @param support The XPath runtime context.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodesInDocOrder(NodeList nodelist, XPathContext support)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  final int count = nodelist.getLength();

  for (int i = 0; i < count; i++)
  {
    Node candidate = nodelist.item(i);

    if (null != candidate)
      addNodeInDocOrder(candidate, support);
  }
}
/**
 * Drain the given iterator and merge every node it yields into this set,
 * inserting each at its document-order position.
 *
 * @param iterator NodeIterator which yields the nodes to be added.
 * @param support The XPath runtime context.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodesInDocOrder(NodeIterator iterator, XPathContext support)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  for (Node n = iterator.nextNode(); null != n; n = iterator.nextNode())
  {
    addNodeInDocOrder(n, support);
  }
}
/**
 * Recursive helper: insert nodelist.item(testIndex) into this set at its
 * document-order position, scanning the element range [start, end] backwards,
 * then recurse to place the remaining (earlier) list entries.
 *
 * @param start index of the first set element to scan.
 * @param end index of the last set element to scan.
 * @param testIndex index into nodelist of the node being placed.
 * @param nodelist The nodelist to add.
 * @param support The XPath runtime context.
 *
 * @return false always (foundit is never set; callers use the return value
 *         only to decide whether to rescan the upper range).
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
private boolean addNodesInDocOrder(int start, int end, int testIndex,
                                   NodeList nodelist, XPathContext support)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  boolean foundit = false;
  int i;
  Node node = nodelist.item(testIndex);

  // Scan backwards for the first existing element that precedes 'node'
  // in document order; insert just after it.
  for (i = end; i >= start; i--)
  {
    Node child = (Node) elementAt(i);

    if (child == node)
    {
      i = -2;  // Duplicate of an existing member: suppress insert.

      break;
    }

    if (!DOM2Helper.isNodeAfter(node, child))
    {
      insertElementAt(node, i + 1);

      testIndex--;

      // Recursively place the previous nodelist entry, first in the range
      // below the insertion point, then (if not found there) above it.
      if (testIndex > 0)
      {
        boolean foundPrev = addNodesInDocOrder(0, i, testIndex, nodelist,
                                               support);

        if (!foundPrev)
        {
          addNodesInDocOrder(i, size() - 1, testIndex, nodelist, support);
        }
      }

      break;
    }
  }

  // Fell off the front of a scan that started at element 0: 'node' precedes
  // every current member, so it becomes the new first element.
  if (i == -1)
  {
    insertElementAt(node, 0);
  }

  return foundit;
}
/**
 * Add the node into a vector of nodes where it should occur in
 * document order.
 * @param node The node to be added.
 * @param test true if we should test for doc order; if false the node is
 * appended at the end (unless already present by equals()).
 * @param support The XPath runtime context (not consulted by this method).
 * @return insertIndex, or -1 if the node was not inserted via the
 * document-order path.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public int addNodeInDocOrder(Node node, boolean test, XPathContext support)
{
if (!m_mutable)
throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"
int insertIndex = -1;
if (test)
{
// This needs to do a binary search, but a binary search
// is somewhat tough because the sequence test involves
// two nodes.
int size = size(), i;
// Linear scan from the tail: stop at the first element the new node
// does not precede; i ends one slot before the insertion point.
for (i = size - 1; i >= 0; i--)
{
Node child = (Node) elementAt(i);
if (child == node)
{
i = -2; // Duplicate, suppress insert
break;
}
if (!DOM2Helper.isNodeAfter(node, child))
{
break;
}
}
if (i != -2)
{
insertIndex = i + 1;
insertElementAt(node, insertIndex);
}
}
else
{
// No doc-order test: append at the end, but only if no equal node
// is already present. Note insertIndex keeps the pre-append size even
// though addElement stores the node at that slot.
insertIndex = this.size();
boolean foundit = false;
for (int i = 0; i < insertIndex; i++)
{
if (this.item(i).equals(node))
{
foundit = true;
break;
}
}
if (!foundit)
addElement(node);
}
// checkDups();
return insertIndex;
} // end addNodeInDocOrder(Vector v, Object obj)
/**
 * Insert the given node at its document-order position; convenience
 * overload equivalent to {@code addNodeInDocOrder(node, true, support)}.
 *
 * @param node The node to be added.
 * @param support The XPath runtime context.
 *
 * @return The index where it was inserted.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public int addNodeInDocOrder(Node node, XPathContext support)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  return addNodeInDocOrder(node, true, support);
}
/** If this node is being used as an iterator, the next index that nextNode()
 * will return. */
transient protected int m_next = 0;
/**
 * Get the current position, which is one less than
 * the next nextNode() call will retrieve. i.e. if
 * you call getCurrentPos() and the return is 0, the next
 * fetch will take place at index 1.
 *
 * @return The current position index.
 */
public int getCurrentPos()
{
return m_next;
}
/**
 * Set the current position in the node set.
 * @param i Must be a valid index.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a cached type, and thus doesn't permit indexed access.
 */
public void setCurrentPos(int i)
{
if (!m_cacheNodes)
throw new RuntimeException(
XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_INDEX, null)); //"This NodeSet can not do indexing or counting functions!");
m_next = i;
}
/**
 * Return the last fetched node. Needed to support the UnionPathIterator.
 * Does not advance the iteration position. Returns null when the cursor
 * is at or past the end of the list.
 *
 * @return the last fetched node.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a cached type, and thus doesn't permit indexed access.
 */
public Node getCurrentNode()
{
if (!m_cacheNodes)
throw new RuntimeException(
XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_INDEX, null)); //"This NodeSet can not do indexing or counting functions!");
// Save/restore of m_next is redundant here since elementAt does not
// modify it, but is kept defensively (see the author's note below).
int saved = m_next;
Node n = (m_next < m_firstFree) ? elementAt(m_next) : null;
m_next = saved; // HACK: I think this is a bit of a hack. -sb
return n;
}
/** True if this list can be mutated. */
transient protected boolean m_mutable = true;
/** True if this list is cached.
 * @serial */
transient protected boolean m_cacheNodes = true;
/**
 * Get whether or not this is a cached node set.
 *
 *
 * @return True if this list is cached.
 */
public boolean getShouldCacheNodes()
{
return m_cacheNodes;
}
/**
 * If setShouldCacheNodes(true) is called, then nodes will
 * be cached. They are not cached by default. This switch must
 * be set before the first call to nextNode is made, to ensure
 * that all nodes are cached.
 *
 * <p>Note: this also unconditionally re-enables mutability.</p>
 *
 * @param b true if this node set should be cached.
 * @throws RuntimeException thrown if an attempt is made to
 * request caching after we've already begun stepping through the
 * nodes in this set.
 */
public void setShouldCacheNodes(boolean b)
{
if (!isFresh())
throw new RuntimeException(
XSLMessages.createXPATHMessage(XPATHErrorResources.ER_CANNOT_CALL_SETSHOULDCACHENODE, null)); //"Can not call setShouldCacheNodes after nextNode has been called!");
m_cacheNodes = b;
m_mutable = true;
}
/** Index of the last node, as recorded by setLast(). */
transient private int m_last = 0;
/**
 * Get the index set by the last call to setLast().
 *
 * @return the recorded last index.
 */
public int getLast()
{
return m_last;
}
/**
 * Record the index of the last node.
 *
 * @param last the index to record.
 */
public void setLast(int last)
{
m_last = last;
}
/** Size of blocks to allocate.
 * @serial */
private int m_blocksize;
/** Array of nodes this points to.
 * @serial */
Node m_map[];
/** Number of nodes in this NodeVector.
 * @serial */
protected int m_firstFree = 0;
/** Size of the array this points to.
 * @serial */
private int m_mapSize; // lazy initialization
/**
 * Get a clone of this NodeSet. The backing node array is copied so the
 * clone's contents can be mutated independently; the Node objects
 * themselves are shared, not cloned.
 *
 * @return A clone of this
 *
 * @throws CloneNotSupportedException
 */
public Object clone() throws CloneNotSupportedException
{
NodeSet clone = (NodeSet) super.clone();
// super.clone() is shallow, so clone.m_map aliases this.m_map; replace
// it with a private copy (skipped when there is no array yet).
if ((null != this.m_map) && (this.m_map == clone.m_map))
{
clone.m_map = new Node[this.m_map.length];
System.arraycopy(this.m_map, 0, clone.m_map, 0, this.m_map.length);
}
return clone;
}
/**
 * Get the length of the list.
 *
 * @return Number of nodes in this NodeVector
 */
public int size()
{
return m_firstFree;
}
/**
 * Append a Node onto the vector, lazily creating or growing the backing
 * array by one block when it is full.
 *
 * @param value Node to add to the vector
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addElement(Node value)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  // Ensure capacity for one more element before writing.
  if ((m_firstFree + 1) >= m_mapSize)
  {
    if (m_map == null)
    {
      m_map = new Node[m_blocksize];
      m_mapSize = m_blocksize;
    }
    else
    {
      m_mapSize += m_blocksize;

      Node grown[] = new Node[m_mapSize];
      System.arraycopy(m_map, 0, grown, 0, m_firstFree + 1);
      m_map = grown;
    }
  }

  m_map[m_firstFree++] = value;
}
/**
 * Append a Node onto the vector. Stack-style variant of addElement that
 * skips the mutability check.
 *
 * @param value Node to add to the vector
 */
public final void push(Node value)
{
  final int tail = m_firstFree;

  // Ensure capacity for one more element before writing.
  if ((tail + 1) >= m_mapSize)
  {
    if (m_map == null)
    {
      m_map = new Node[m_blocksize];
      m_mapSize = m_blocksize;
    }
    else
    {
      m_mapSize += m_blocksize;

      Node grown[] = new Node[m_mapSize];
      System.arraycopy(m_map, 0, grown, 0, tail + 1);
      m_map = grown;
    }
  }

  m_map[tail] = value;
  m_firstFree = tail + 1;
}
/**
 * Pop a node from the tail of the vector and return the result.
 * No underflow check: popping an empty vector will fail with an
 * ArrayIndexOutOfBoundsException (or NullPointerException if the
 * backing array was never allocated).
 *
 * @return the node at the tail of the vector
 */
public final Node pop()
{
m_firstFree--;
Node n = m_map[m_firstFree];
// Clear the slot so the Node can be garbage collected.
m_map[m_firstFree] = null;
return n;
}
/**
 * Pop a node from the tail of the vector and return the
 * top of the stack after the pop. Returns null when the pop
 * emptied the stack.
 *
 * @return The top of the stack after it's been popped
 */
public final Node popAndTop()
{
m_firstFree--;
m_map[m_firstFree] = null;
return (m_firstFree == 0) ? null : m_map[m_firstFree - 1];
}
/**
 * Pop a node from the tail of the vector, discarding it.
 */
public final void popQuick()
{
m_firstFree--;
m_map[m_firstFree] = null;
}
/**
 * Return the node at the top of the stack without popping the stack.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * @return Node at the top of the stack or null if stack is empty.
 */
public final Node peepOrNull()
{
return ((null != m_map) && (m_firstFree > 0))
? m_map[m_firstFree - 1] : null;
}
/**
 * Push a pair of nodes into the stack.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * <p>NOTE(review): capacity grows by a single block here; this appears to
 * assume m_blocksize >= 2 — confirm against the constructors (outside
 * this view) that set m_blocksize.</p>
 *
 * @param v1 First node to add to vector
 * @param v2 Second node to add to vector
 */
public final void pushPair(Node v1, Node v2)
{
if (null == m_map)
{
m_map = new Node[m_blocksize];
m_mapSize = m_blocksize;
}
else
{
if ((m_firstFree + 2) >= m_mapSize)
{
m_mapSize += m_blocksize;
Node newMap[] = new Node[m_mapSize];
System.arraycopy(m_map, 0, newMap, 0, m_firstFree);
m_map = newMap;
}
}
m_map[m_firstFree] = v1;
m_map[m_firstFree + 1] = v2;
m_firstFree += 2;
}
/**
 * Pop a pair of nodes from the tail of the stack.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical. No underflow check.
 */
public final void popPair()
{
m_firstFree -= 2;
m_map[m_firstFree] = null;
m_map[m_firstFree + 1] = null;
}
/**
 * Set the tail of the stack to the given node.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * @param n Node to set at the tail of vector
 */
public final void setTail(Node n)
{
m_map[m_firstFree - 1] = n;
}
/**
 * Set the given node one position from the tail.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * @param n Node to set
 */
public final void setTailSub1(Node n)
{
m_map[m_firstFree - 2] = n;
}
/**
 * Return the node at the tail of the vector without popping
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * @return Node at the tail of the vector
 */
public final Node peepTail()
{
return m_map[m_firstFree - 1];
}
/**
 * Return the node one position from the tail without popping.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * @return Node one away from the tail
 */
public final Node peepTailSub1()
{
return m_map[m_firstFree - 2];
}
/**
 * Inserts the specified node in this vector at the specified index.
 * Each component in this vector with an index greater or equal to
 * the specified index is shifted upward to have an index one greater
 * than the value it had previously.
 *
 * <p>No bounds check is performed on {@code at}; callers are expected
 * to pass 0 &lt;= at &lt;= size().</p>
 *
 * @param value Node to insert
 * @param at Position where to insert
 * @throws RuntimeException thrown if this NodeSet is not mutable.
 */
public void insertElementAt(Node value, int at)
{
if (!m_mutable)
throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");
if (null == m_map)
{
m_map = new Node[m_blocksize];
m_mapSize = m_blocksize;
}
else if ((m_firstFree + 1) >= m_mapSize)
{
m_mapSize += m_blocksize;
Node newMap[] = new Node[m_mapSize];
// Copies one slot past the live elements; harmless since the growth
// guard guarantees m_firstFree + 1 <= old capacity.
System.arraycopy(m_map, 0, newMap, 0, m_firstFree + 1);
m_map = newMap;
}
// Shift the tail right to open a hole at 'at' (no-op when appending).
if (at <= (m_firstFree - 1))
{
System.arraycopy(m_map, at, m_map, at + 1, m_firstFree - at);
}
m_map[at] = value;
m_firstFree++;
}
/**
 * Append all nodes of the given NodeSet to the end of this list,
 * growing the backing array as needed.
 *
 * @param nodes NodeVector to append to this list
 */
public void appendNodes(NodeSet nodes)
{
  int nNodes = nodes.size();

  // Nothing to append; also avoids a NullPointerException below when the
  // source set's backing array was never allocated.
  if (nNodes == 0)
    return;

  if (null == m_map)
  {
    m_mapSize = nNodes + m_blocksize;
    m_map = new Node[m_mapSize];
  }
  else if ((m_firstFree + nNodes) >= m_mapSize)
  {
    m_mapSize += (nNodes + m_blocksize);

    Node newMap[] = new Node[m_mapSize];

    // BUG FIX: copy only the m_firstFree live entries. The previous code
    // copied (m_firstFree + nNodes) elements out of the OLD array, which
    // reads past its end (ArrayIndexOutOfBoundsException) whenever the
    // combined size exceeds the old capacity.
    System.arraycopy(m_map, 0, newMap, 0, m_firstFree);

    m_map = newMap;
  }

  System.arraycopy(nodes.m_map, 0, m_map, m_firstFree, nNodes);

  m_firstFree += nNodes;
}
/**
 * Remove all nodes from this list, clearing each live slot so the
 * nodes can be garbage collected. The backing array is retained.
 */
public void removeAllElements()
{
  if (m_map == null)
    return;

  for (int slot = m_firstFree; --slot >= 0; )
  {
    m_map[slot] = null;
  }

  m_firstFree = 0;
}
/**
 * Removes the first occurrence (by equals()) of the argument from this
 * vector, shifting every later element down one slot.
 *
 * @param s Node to remove from the list
 *
 * @return True if the node was successfully removed
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public boolean removeElement(Node s)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"

  if (m_map == null)
    return false;

  for (int pos = 0; pos < m_firstFree; pos++)
  {
    Node candidate = m_map[pos];

    if (candidate != null && candidate.equals(s))
    {
      // Close the gap, then clear the now-unused tail slot.
      if (pos < m_firstFree - 1)
        System.arraycopy(m_map, pos + 1, m_map, pos, m_firstFree - pos - 1);

      m_map[--m_firstFree] = null;

      return true;
    }
  }

  return false;
}
/**
 * Deletes the component at the specified index, shifting every later
 * element down one slot.
 *
 * @param i Index of node to remove
 * @throws ArrayIndexOutOfBoundsException if i is negative or past the
 * last element.
 */
public void removeElementAt(int i)
{
  if (m_map == null)
    return;

  if (i >= m_firstFree)
    throw new ArrayIndexOutOfBoundsException(i + " >= " + m_firstFree);

  if (i < 0)
    throw new ArrayIndexOutOfBoundsException(i);

  // Close the gap, then clear the now-unused tail slot.
  if (i < m_firstFree - 1)
    System.arraycopy(m_map, i + 1, m_map, i, m_firstFree - i - 1);

  m_map[--m_firstFree] = null;
}
/**
 * Sets the component at the specified index of this vector to be the
 * specified object. The previous component at that position is discarded.
 *
 * The index must be a value greater than or equal to 0 and less
 * than the current size of the vector.
 *
 * <p>NOTE(review): when the backing array is unallocated this creates a
 * single block and writes into it without checking that {@code index}
 * fits, and m_firstFree is never updated — callers appear responsible
 * for only setting indices already within size(); confirm.</p>
 *
 * @param node Node to set
 * @param index Index of where to set the node
 * @throws RuntimeException thrown if this NodeSet is not mutable.
 */
public void setElementAt(Node node, int index)
{
if (!m_mutable)
throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");
if (null == m_map)
{
m_map = new Node[m_blocksize];
m_mapSize = m_blocksize;
}
m_map[index] = node;
}
/**
 * Get the nth element. Returns null when the backing array has never
 * been allocated; otherwise no bounds check is performed.
 *
 * @param i Index of node to get
 *
 * @return Node at specified index
 */
public Node elementAt(int i)
{
  return (m_map == null) ? null : m_map[i];
}
/**
 * Tell if the table contains the given node (by equals()), first
 * running the iterator to completion so all nodes are present.
 *
 * @param s Node to look for
 *
 * @return True if the given node was found.
 */
public boolean contains(Node s)
{
  runTo(-1);

  if (m_map == null)
    return false;

  for (int pos = 0; pos < m_firstFree; pos++)
  {
    Node candidate = m_map[pos];

    if (candidate != null && candidate.equals(s))
      return true;
  }

  return false;
}
/**
 * Searches for the first occurrence of the given argument, beginning
 * the search at index and testing for equality using the equals method,
 * after running the iterator to completion.
 *
 * @param elem Node to look for
 * @param index Index of where to start the search
 * @return the index of the first occurrence of the object
 * argument in this vector at position index or later in the
 * vector; returns -1 if the object is not found.
 */
public int indexOf(Node elem, int index)
{
  runTo(-1);

  if (m_map == null)
    return -1;

  for (int pos = index; pos < m_firstFree; pos++)
  {
    Node candidate = m_map[pos];

    if (candidate != null && candidate.equals(elem))
      return pos;
  }

  return -1;
}
/**
 * Searches for the first occurrence of the given argument, testing for
 * equality using the equals method, after running the iterator to
 * completion.
 *
 * @param elem Node to look for
 * @return the index of the first occurrence of the object
 * argument in this vector; returns -1 if the object is not found.
 */
public int indexOf(Node elem)
{
  // Delegate to the two-argument overload rather than duplicating its
  // scan loop verbatim; indexOf(elem, 0) performs the same runTo(-1),
  // null-map check, and equals() scan from the start of the list.
  return indexOf(elem, 0);
}
}
|
apache/flink | 36,013 | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/util/SegmentsUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.util;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.core.memory.MemorySegment;
import java.io.IOException;
import java.nio.ByteOrder;
import static org.apache.flink.core.memory.MemoryUtils.UNSAFE;
/** Util for data format segments calc. */
public class SegmentsUtil {
/** Constant that flags the byte order. */
public static final boolean LITTLE_ENDIAN = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN;
// log2(bits per byte): shifting a bit index right by this yields a byte index.
private static final int ADDRESS_BITS_PER_WORD = 3;
// Mask extracting the bit position within a byte (index & 7).
private static final int BIT_BYTE_INDEX_MASK = 7;
/**
 * SQL execution threads is limited, not too many, so it can bear the overhead of 64K per
 * thread.
 */
private static final int MAX_BYTES_LENGTH = 1024 * 64;
// Char buffer cap: 32K chars == 64K bytes, matching MAX_BYTES_LENGTH.
private static final int MAX_CHARS_LENGTH = 1024 * 32;
// Unsafe base offset of byte[] element 0, for the hashUnsafe* calls below.
private static final int BYTE_ARRAY_BASE_OFFSET = UNSAFE.arrayBaseOffset(byte[].class);
// Per-thread reusable scratch buffers; see allocateReuseBytes/Chars.
private static final ThreadLocal<byte[]> BYTES_LOCAL = new ThreadLocal<>();
private static final ThreadLocal<char[]> CHARS_LOCAL = new ThreadLocal<>();
/**
 * Allocate bytes that is only for temporary usage, it should not be stored in somewhere else.
 * Use a {@link ThreadLocal} to reuse bytes to avoid overhead of byte[] new and gc.
 *
 * <p>If there are methods that can only accept a byte[], instead of a MemorySegment[]
 * parameter, we can allocate a reuse bytes and copy the MemorySegment data to byte[], then call
 * the method. Such as String deserialization.
 *
 * <p>The returned array may be longer than {@code length} and may contain stale data.
 */
public static byte[] allocateReuseBytes(int length) {
    byte[] cached = BYTES_LOCAL.get();

    if (cached == null) {
        // First request on this thread: cache a full-size buffer when the
        // request fits; otherwise hand out a one-off array without caching.
        if (length > MAX_BYTES_LENGTH) {
            return new byte[length];
        }
        cached = new byte[MAX_BYTES_LENGTH];
        BYTES_LOCAL.set(cached);
        return cached;
    }

    // Cached buffer exists: reuse it when big enough, else allocate fresh
    // (without replacing the cached one).
    return (cached.length >= length) ? cached : new byte[length];
}
/**
 * Allocate a temporary char buffer, reusing a per-thread cached array when
 * possible (the char analogue of {@link #allocateReuseBytes}). The returned
 * array may be longer than {@code length} and may contain stale data.
 */
public static char[] allocateReuseChars(int length) {
    char[] cached = CHARS_LOCAL.get();

    if (cached == null) {
        // First request on this thread: cache a full-size buffer when the
        // request fits; otherwise hand out a one-off array without caching.
        if (length > MAX_CHARS_LENGTH) {
            return new char[length];
        }
        cached = new char[MAX_CHARS_LENGTH];
        CHARS_LOCAL.set(cached);
        return cached;
    }

    return (cached.length >= length) ? cached : new char[length];
}
/**
 * Copy segments to a freshly allocated byte[].
 *
 * @param segments Source segments.
 * @param offset Source segments offset.
 * @param numBytes the number bytes to copy.
 */
public static byte[] copyToBytes(MemorySegment[] segments, int offset, int numBytes) {
    byte[] target = new byte[numBytes];
    return copyToBytes(segments, offset, target, 0, numBytes);
}
/**
 * Copy segments to target byte[].
 *
 * @param segments Source segments.
 * @param offset Source segments offset.
 * @param bytes target byte[].
 * @param bytesOffset target byte[] offset.
 * @param numBytes the number bytes to copy.
 * @return the target byte[] passed in.
 */
public static byte[] copyToBytes(
        MemorySegment[] segments, int offset, byte[] bytes, int bytesOffset, int numBytes) {
    // Fast path when the whole range lies within the first segment.
    if (!inFirstSegment(segments, offset, numBytes)) {
        copyMultiSegmentsToBytes(segments, offset, bytes, bytesOffset, numBytes);
    } else {
        segments[0].get(offset, bytes, bytesOffset, numBytes);
    }
    return bytes;
}
/**
 * Copy {@code numBytes} bytes starting at logical {@code offset} across the
 * segment array into {@code bytes} at {@code bytesOffset}, walking the
 * segments in order and reducing the offset segment by segment.
 */
public static void copyMultiSegmentsToBytes(
MemorySegment[] segments, int offset, byte[] bytes, int bytesOffset, int numBytes) {
int remainSize = numBytes;
for (MemorySegment segment : segments) {
int remain = segment.size() - offset;
if (remain > 0) {
// This segment holds part (or all) of the requested range.
int nCopy = Math.min(remain, remainSize);
segment.get(offset, bytes, numBytes - remainSize + bytesOffset, nCopy);
remainSize -= nCopy;
// next new segment.
offset = 0;
if (remainSize == 0) {
return;
}
} else {
// remain is negative, let's advance to next segment
// now the offset = offset - segmentSize (-remain)
offset = -remain;
}
}
}
/**
 * Copy segments to target unsafe pointer.
 *
 * @param segments Source segments.
 * @param offset The position where the bytes are started to be read from these memory segments.
 * @param target The unsafe memory to copy the bytes to.
 * @param pointer The position in the target unsafe memory to copy the chunk to.
 * @param numBytes the number bytes to copy.
 */
public static void copyToUnsafe(
MemorySegment[] segments, int offset, Object target, int pointer, int numBytes) {
// Fast path when the whole range lies within the first segment.
if (inFirstSegment(segments, offset, numBytes)) {
segments[0].copyToUnsafe(offset, target, pointer, numBytes);
} else {
copyMultiSegmentsToUnsafe(segments, offset, target, pointer, numBytes);
}
}
// Slow path of copyToUnsafe: same segment-walking scheme as
// copyMultiSegmentsToBytes, writing via copyToUnsafe instead of get().
private static void copyMultiSegmentsToUnsafe(
MemorySegment[] segments, int offset, Object target, int pointer, int numBytes) {
int remainSize = numBytes;
for (MemorySegment segment : segments) {
int remain = segment.size() - offset;
if (remain > 0) {
int nCopy = Math.min(remain, remainSize);
segment.copyToUnsafe(offset, target, numBytes - remainSize + pointer, nCopy);
remainSize -= nCopy;
// next new segment.
offset = 0;
if (remainSize == 0) {
return;
}
} else {
// remain is negative, let's advance to next segment
// now the offset = offset - segmentSize (-remain)
offset = -remain;
}
}
}
/**
 * Copy bytes of segments to output view. Note: It just copies the data in, not include the
 * length.
 *
 * @param segments source segments
 * @param offset offset for segments
 * @param sizeInBytes size in bytes
 * @param target target output view
 * @throws IOException propagated from the output view
 * @throws RuntimeException if the segments are exhausted before
 * sizeInBytes bytes have been written (indicates a caller bug)
 */
public static void copyToView(
MemorySegment[] segments, int offset, int sizeInBytes, DataOutputView target)
throws IOException {
for (MemorySegment sourceSegment : segments) {
int curSegRemain = sourceSegment.size() - offset;
if (curSegRemain > 0) {
int copySize = Math.min(curSegRemain, sizeInBytes);
// Stage through a reusable thread-local buffer since DataOutputView
// only accepts byte[].
byte[] bytes = allocateReuseBytes(copySize);
sourceSegment.get(offset, bytes, 0, copySize);
target.write(bytes, 0, copySize);
sizeInBytes -= copySize;
offset = 0;
} else {
offset -= sourceSegment.size();
}
if (sizeInBytes == 0) {
return;
}
}
if (sizeInBytes != 0) {
throw new RuntimeException(
"No copy finished, this should be a bug, "
+ "The remaining length is: "
+ sizeInBytes);
}
}
/**
 * Copy target segments from source byte[].
 *
 * @param segments target segments.
 * @param offset target segments offset.
 * @param bytes source byte[].
 * @param bytesOffset source byte[] offset.
 * @param numBytes the number bytes to copy.
 */
public static void copyFromBytes(
MemorySegment[] segments, int offset, byte[] bytes, int bytesOffset, int numBytes) {
// Note: unlike the read-side dispatchers this branches on segment count
// rather than inFirstSegment(); with a single segment, put() itself
// bounds-checks the range.
if (segments.length == 1) {
segments[0].put(offset, bytes, bytesOffset, numBytes);
} else {
copyMultiSegmentsFromBytes(segments, offset, bytes, bytesOffset, numBytes);
}
}
// Slow path of copyFromBytes: same segment-walking scheme as
// copyMultiSegmentsToBytes, but writing into the segments via put().
private static void copyMultiSegmentsFromBytes(
MemorySegment[] segments, int offset, byte[] bytes, int bytesOffset, int numBytes) {
int remainSize = numBytes;
for (MemorySegment segment : segments) {
int remain = segment.size() - offset;
if (remain > 0) {
int nCopy = Math.min(remain, remainSize);
segment.put(offset, bytes, numBytes - remainSize + bytesOffset, nCopy);
remainSize -= nCopy;
// next new segment.
offset = 0;
if (remainSize == 0) {
return;
}
} else {
// remain is negative, let's advance to next segment
// now the offset = offset - segmentSize (-remain)
offset = -remain;
}
}
}
/** Maybe not copied, if want copy, please use copyTo.
 * <p>WARNING: when a single heap-backed segment exactly covers the range,
 * the segment's internal array is returned directly (aliased, not copied);
 * callers must not mutate or retain it. */
public static byte[] getBytes(MemorySegment[] segments, int baseOffset, int sizeInBytes) {
// avoid copy if `base` is `byte[]`
if (segments.length == 1) {
byte[] heapMemory = segments[0].getHeapMemory();
if (baseOffset == 0 && heapMemory != null && heapMemory.length == sizeInBytes) {
return heapMemory;
} else {
byte[] bytes = new byte[sizeInBytes];
segments[0].get(baseOffset, bytes, 0, sizeInBytes);
return bytes;
}
} else {
byte[] bytes = new byte[sizeInBytes];
copyMultiSegmentsToBytes(segments, baseOffset, bytes, 0, sizeInBytes);
return bytes;
}
}
/**
 * Equals two memory segments regions.
 *
 * @param segments1 Segments 1
 * @param offset1 Offset of segments1 to start equaling
 * @param segments2 Segments 2
 * @param offset2 Offset of segments2 to start equaling
 * @param len Length of the equaled memory region
 * @return true if equal, false otherwise
 */
public static boolean equals(
MemorySegment[] segments1,
int offset1,
MemorySegment[] segments2,
int offset2,
int len) {
// Fast path when both ranges lie within their first segments.
if (inFirstSegment(segments1, offset1, len) && inFirstSegment(segments2, offset2, len)) {
return segments1[0].equalTo(segments2[0], offset1, offset2, len);
} else {
return equalsMultiSegments(segments1, offset1, segments2, offset2, len);
}
}
// Slow path of equals: compares chunk by chunk, where each chunk is the
// largest range that is contiguous in both segment arrays simultaneously.
// NOTE(review): the segIndex/segOffset arithmetic assumes every segment in
// an array has the same size as its first segment — confirm with callers.
@VisibleForTesting
static boolean equalsMultiSegments(
MemorySegment[] segments1,
int offset1,
MemorySegment[] segments2,
int offset2,
int len) {
if (len == 0) {
// quick way and avoid segSize is zero.
return true;
}
int segSize1 = segments1[0].size();
int segSize2 = segments2[0].size();
// find first segIndex and segOffset of segments.
int segIndex1 = offset1 / segSize1;
int segIndex2 = offset2 / segSize2;
int segOffset1 = offset1 - segSize1 * segIndex1; // equal to %
int segOffset2 = offset2 - segSize2 * segIndex2; // equal to %
while (len > 0) {
int equalLen = Math.min(Math.min(len, segSize1 - segOffset1), segSize2 - segOffset2);
if (!segments1[segIndex1].equalTo(
segments2[segIndex2], segOffset1, segOffset2, equalLen)) {
return false;
}
len -= equalLen;
segOffset1 += equalLen;
if (segOffset1 == segSize1) {
segOffset1 = 0;
segIndex1++;
}
segOffset2 += equalLen;
if (segOffset2 == segSize2) {
segOffset2 = 0;
segIndex2++;
}
}
return true;
}
/**
 * hash segments to int, numBytes must be aligned to 4 bytes.
 *
 * @param segments Source segments.
 * @param offset Source segments offset.
 * @param numBytes the number bytes to hash.
 */
public static int hashByWords(MemorySegment[] segments, int offset, int numBytes) {
// Fast path when the whole range lies within the first segment.
if (inFirstSegment(segments, offset, numBytes)) {
return MurmurHashUtil.hashBytesByWords(segments[0], offset, numBytes);
} else {
return hashMultiSegByWords(segments, offset, numBytes);
}
}
// Slow path of hashByWords: gather the range into a reusable thread-local
// buffer and hash that via Unsafe.
private static int hashMultiSegByWords(MemorySegment[] segments, int offset, int numBytes) {
byte[] bytes = allocateReuseBytes(numBytes);
copyMultiSegmentsToBytes(segments, offset, bytes, 0, numBytes);
return MurmurHashUtil.hashUnsafeBytesByWords(bytes, BYTE_ARRAY_BASE_OFFSET, numBytes);
}
/**
 * hash segments to int.
 *
 * @param segments Source segments.
 * @param offset Source segments offset.
 * @param numBytes the number bytes to hash.
 */
public static int hash(MemorySegment[] segments, int offset, int numBytes) {
if (inFirstSegment(segments, offset, numBytes)) {
return MurmurHashUtil.hashBytes(segments[0], offset, numBytes);
} else {
return hashMultiSeg(segments, offset, numBytes);
}
}
// Slow path of hash: gather then hash, mirroring hashMultiSegByWords.
private static int hashMultiSeg(MemorySegment[] segments, int offset, int numBytes) {
byte[] bytes = allocateReuseBytes(numBytes);
copyMultiSegmentsToBytes(segments, offset, bytes, 0, numBytes);
return MurmurHashUtil.hashUnsafeBytes(bytes, BYTE_ARRAY_BASE_OFFSET, numBytes);
}
/** Is it just in first MemorySegment, we use quick way to do something.
 * True when the byte range [offset, offset + numBytes) fits entirely
 * within segments[0]. */
private static boolean inFirstSegment(MemorySegment[] segments, int offset, int numBytes) {
return numBytes + offset <= segments[0].size();
}
/**
 * Given a bit index, return the byte index containing it.
 * (Unsigned shift right by 3, i.e. divide by 8.)
 *
 * @param bitIndex the bit index.
 * @return the byte index.
 */
private static int byteIndex(int bitIndex) {
return bitIndex >>> ADDRESS_BITS_PER_WORD;
}
/**
 * unset bit. Read-modify-write on the byte containing the bit; not
 * atomic with respect to concurrent writers.
 *
 * @param segment target segment.
 * @param baseOffset bits base offset.
 * @param index bit index from base offset.
 */
public static void bitUnSet(MemorySegment segment, int baseOffset, int index) {
int offset = baseOffset + byteIndex(index);
byte current = segment.get(offset);
current &= ~(1 << (index & BIT_BYTE_INDEX_MASK));
segment.put(offset, current);
}
/**
 * set bit. Read-modify-write on the byte containing the bit; not
 * atomic with respect to concurrent writers.
 *
 * @param segment target segment.
 * @param baseOffset bits base offset.
 * @param index bit index from base offset.
 */
public static void bitSet(MemorySegment segment, int baseOffset, int index) {
int offset = baseOffset + byteIndex(index);
byte current = segment.get(offset);
current |= (1 << (index & BIT_BYTE_INDEX_MASK));
segment.put(offset, current);
}
/**
 * read bit.
 *
 * @param segment target segment.
 * @param baseOffset bits base offset.
 * @param index bit index from base offset.
 * @return true if the bit is set.
 */
public static boolean bitGet(MemorySegment segment, int baseOffset, int index) {
int offset = baseOffset + byteIndex(index);
byte current = segment.get(offset);
return (current & (1 << (index & BIT_BYTE_INDEX_MASK))) != 0;
}
/**
 * unset bit from segments.
 *
 * @param segments target segments.
 * @param baseOffset bits base offset.
 * @param index bit index from base offset.
 */
public static void bitUnSet(MemorySegment[] segments, int baseOffset, int index) {
if (segments.length == 1) {
// Single-segment fast path, inlined for performance.
MemorySegment segment = segments[0];
int offset = baseOffset + byteIndex(index);
byte current = segment.get(offset);
current &= ~(1 << (index & BIT_BYTE_INDEX_MASK));
segment.put(offset, current);
} else {
bitUnSetMultiSegments(segments, baseOffset, index);
}
}
// Multi-segment slow path. NOTE(review): the segIndex/segOffset math
// assumes all segments share the first segment's size — confirm.
private static void bitUnSetMultiSegments(MemorySegment[] segments, int baseOffset, int index) {
int offset = baseOffset + byteIndex(index);
int segSize = segments[0].size();
int segIndex = offset / segSize;
int segOffset = offset - segIndex * segSize; // equal to %
MemorySegment segment = segments[segIndex];
byte current = segment.get(segOffset);
current &= ~(1 << (index & BIT_BYTE_INDEX_MASK));
segment.put(segOffset, current);
}
/**
 * set bit from segments.
 *
 * @param segments target segments.
 * @param baseOffset bits base offset.
 * @param index bit index from base offset.
 */
public static void bitSet(MemorySegment[] segments, int baseOffset, int index) {
if (segments.length == 1) {
// Single-segment fast path, inlined for performance.
int offset = baseOffset + byteIndex(index);
MemorySegment segment = segments[0];
byte current = segment.get(offset);
current |= (1 << (index & BIT_BYTE_INDEX_MASK));
segment.put(offset, current);
} else {
bitSetMultiSegments(segments, baseOffset, index);
}
}
// Multi-segment slow path; mirrors bitUnSetMultiSegments with OR.
private static void bitSetMultiSegments(MemorySegment[] segments, int baseOffset, int index) {
int offset = baseOffset + byteIndex(index);
int segSize = segments[0].size();
int segIndex = offset / segSize;
int segOffset = offset - segIndex * segSize; // equal to %
MemorySegment segment = segments[segIndex];
byte current = segment.get(segOffset);
current |= (1 << (index & BIT_BYTE_INDEX_MASK));
segment.put(segOffset, current);
}
/**
 * read bit from segments.
 *
 * @param segments target segments.
 * @param baseOffset bits base offset.
 * @param index bit index from base offset.
 * @return true if the bit is set.
 */
public static boolean bitGet(MemorySegment[] segments, int baseOffset, int index) {
    // Locate the byte holding the bit, then test the bit within it.
    byte containing = getByte(segments, baseOffset + byteIndex(index));
    int mask = 1 << (index & BIT_BYTE_INDEX_MASK);
    return (containing & mask) != 0;
}
/**
 * get boolean from segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @return the boolean at the given logical offset.
 */
public static boolean getBoolean(MemorySegment[] segments, int offset) {
// Fast path when the byte lies within the first segment.
if (inFirstSegment(segments, offset, 1)) {
return segments[0].getBoolean(offset);
} else {
return getBooleanMultiSegments(segments, offset);
}
}
// Multi-segment slow path. NOTE(review): assumes all segments share the
// first segment's size — confirm.
private static boolean getBooleanMultiSegments(MemorySegment[] segments, int offset) {
int segSize = segments[0].size();
int segIndex = offset / segSize;
int segOffset = offset - segIndex * segSize; // equal to %
return segments[segIndex].getBoolean(segOffset);
}
/**
 * set boolean from segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @param value the boolean to write.
 */
public static void setBoolean(MemorySegment[] segments, int offset, boolean value) {
if (inFirstSegment(segments, offset, 1)) {
segments[0].putBoolean(offset, value);
} else {
setBooleanMultiSegments(segments, offset, value);
}
}
// Multi-segment slow path; mirrors getBooleanMultiSegments.
private static void setBooleanMultiSegments(
MemorySegment[] segments, int offset, boolean value) {
int segSize = segments[0].size();
int segIndex = offset / segSize;
int segOffset = offset - segIndex * segSize; // equal to %
segments[segIndex].putBoolean(segOffset, value);
}
/**
 * Reads a byte from the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 */
public static byte getByte(MemorySegment[] segments, int offset) {
    // Fast path: the value lies entirely within the first segment.
    return inFirstSegment(segments, offset, 1)
            ? segments[0].get(offset)
            : getByteMultiSegments(segments, offset);
}

private static byte getByteMultiSegments(MemorySegment[] segments, int offset) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    return segments[whichSegment].get(localOffset);
}
/**
 * Writes a byte into the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @param value value to write.
 */
public static void setByte(MemorySegment[] segments, int offset, byte value) {
    if (!inFirstSegment(segments, offset, 1)) {
        setByteMultiSegments(segments, offset, value);
        return;
    }
    // Fast path: the value lies entirely within the first segment.
    segments[0].put(offset, value);
}

private static void setByteMultiSegments(MemorySegment[] segments, int offset, byte value) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    segments[whichSegment].put(localOffset, value);
}
/**
 * Reads a 4-byte int from the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 */
public static int getInt(MemorySegment[] segments, int offset) {
    // Fast path: all four bytes live in the first segment.
    return inFirstSegment(segments, offset, 4)
            ? segments[0].getInt(offset)
            : getIntMultiSegments(segments, offset);
}

private static int getIntMultiSegments(MemorySegment[] segments, int offset) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 4) {
        // The four bytes straddle a segment boundary; assemble them byte by byte.
        return getIntSlowly(segments, segmentSize, whichSegment, localOffset);
    }
    return segments[whichSegment].getInt(localOffset);
}
/**
 * Reads a 4-byte int that straddles a segment boundary, assembling it one byte at a time while
 * honoring the platform byte order.
 *
 * @param segments target segments (all of size {@code segSize}).
 * @param segSize size in bytes of each segment.
 * @param segNum index of the segment containing the first byte.
 * @param segOffset offset of the first byte within segment {@code segNum}.
 */
private static int getIntSlowly(
        MemorySegment[] segments, int segSize, int segNum, int segOffset) {
    MemorySegment segment = segments[segNum];
    int ret = 0;
    for (int i = 0; i < 4; i++) {
        // Advance to the next segment once the current one is exhausted.
        if (segOffset == segSize) {
            segment = segments[++segNum];
            segOffset = 0;
        }
        // Mask to avoid sign extension when widening the byte.
        int unsignedByte = segment.get(segOffset) & 0xff;
        // Shift the byte into the position dictated by the platform byte order.
        if (LITTLE_ENDIAN) {
            ret |= (unsignedByte << (i * 8));
        } else {
            ret |= (unsignedByte << ((3 - i) * 8));
        }
        segOffset++;
    }
    return ret;
}
/**
 * Writes a 4-byte int into the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @param value value to write.
 */
public static void setInt(MemorySegment[] segments, int offset, int value) {
    if (!inFirstSegment(segments, offset, 4)) {
        setIntMultiSegments(segments, offset, value);
        return;
    }
    // Fast path: all four bytes live in the first segment.
    segments[0].putInt(offset, value);
}

private static void setIntMultiSegments(MemorySegment[] segments, int offset, int value) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 4) {
        // The four bytes straddle a segment boundary; write them byte by byte.
        setIntSlowly(segments, segmentSize, whichSegment, localOffset, value);
        return;
    }
    segments[whichSegment].putInt(localOffset, value);
}
/**
 * Writes a 4-byte int that straddles a segment boundary, one byte at a time, honoring the
 * platform byte order.
 *
 * @param segments target segments (all of size {@code segSize}).
 * @param segSize size in bytes of each segment.
 * @param segNum index of the segment receiving the first byte.
 * @param segOffset offset of the first byte within segment {@code segNum}.
 * @param value the int value to write.
 */
private static void setIntSlowly(
        MemorySegment[] segments, int segSize, int segNum, int segOffset, int value) {
    MemorySegment segment = segments[segNum];
    for (int i = 0; i < 4; i++) {
        // Advance to the next segment once the current one is exhausted.
        if (segOffset == segSize) {
            segment = segments[++segNum];
            segOffset = 0;
        }
        int unsignedByte;
        // Select the byte of `value` dictated by the platform byte order; the
        // cast to byte below keeps only the low 8 bits.
        if (LITTLE_ENDIAN) {
            unsignedByte = value >> (i * 8);
        } else {
            unsignedByte = value >> ((3 - i) * 8);
        }
        segment.put(segOffset, (byte) unsignedByte);
        segOffset++;
    }
}
/**
 * Reads an 8-byte long from the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 */
public static long getLong(MemorySegment[] segments, int offset) {
    // Fast path: all eight bytes live in the first segment.
    return inFirstSegment(segments, offset, 8)
            ? segments[0].getLong(offset)
            : getLongMultiSegments(segments, offset);
}

private static long getLongMultiSegments(MemorySegment[] segments, int offset) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 8) {
        // The eight bytes straddle a segment boundary; assemble them byte by byte.
        return getLongSlowly(segments, segmentSize, whichSegment, localOffset);
    }
    return segments[whichSegment].getLong(localOffset);
}
/**
 * Reads an 8-byte long that straddles a segment boundary, assembling it one byte at a time
 * while honoring the platform byte order.
 *
 * @param segments target segments (all of size {@code segSize}).
 * @param segSize size in bytes of each segment.
 * @param segNum index of the segment containing the first byte.
 * @param segOffset offset of the first byte within segment {@code segNum}.
 */
private static long getLongSlowly(
        MemorySegment[] segments, int segSize, int segNum, int segOffset) {
    MemorySegment segment = segments[segNum];
    long ret = 0;
    for (int i = 0; i < 8; i++) {
        // Advance to the next segment once the current one is exhausted.
        if (segOffset == segSize) {
            segment = segments[++segNum];
            segOffset = 0;
        }
        // Mask to avoid sign extension when widening the byte.
        long unsignedByte = segment.get(segOffset) & 0xff;
        // Shift the byte into the position dictated by the platform byte order.
        if (LITTLE_ENDIAN) {
            ret |= (unsignedByte << (i * 8));
        } else {
            ret |= (unsignedByte << ((7 - i) * 8));
        }
        segOffset++;
    }
    return ret;
}
/**
 * Writes an 8-byte long into the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @param value value to write.
 */
public static void setLong(MemorySegment[] segments, int offset, long value) {
    if (!inFirstSegment(segments, offset, 8)) {
        setLongMultiSegments(segments, offset, value);
        return;
    }
    // Fast path: all eight bytes live in the first segment.
    segments[0].putLong(offset, value);
}

private static void setLongMultiSegments(MemorySegment[] segments, int offset, long value) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 8) {
        // The eight bytes straddle a segment boundary; write them byte by byte.
        setLongSlowly(segments, segmentSize, whichSegment, localOffset, value);
        return;
    }
    segments[whichSegment].putLong(localOffset, value);
}
/**
 * Writes an 8-byte long that straddles a segment boundary, one byte at a time, honoring the
 * platform byte order.
 *
 * @param segments target segments (all of size {@code segSize}).
 * @param segSize size in bytes of each segment.
 * @param segNum index of the segment receiving the first byte.
 * @param segOffset offset of the first byte within segment {@code segNum}.
 * @param value the long value to write.
 */
private static void setLongSlowly(
        MemorySegment[] segments, int segSize, int segNum, int segOffset, long value) {
    MemorySegment segment = segments[segNum];
    for (int i = 0; i < 8; i++) {
        // Advance to the next segment once the current one is exhausted.
        if (segOffset == segSize) {
            segment = segments[++segNum];
            segOffset = 0;
        }
        long unsignedByte;
        // Select the byte of `value` dictated by the platform byte order; the
        // cast to byte below keeps only the low 8 bits.
        if (LITTLE_ENDIAN) {
            unsignedByte = value >> (i * 8);
        } else {
            unsignedByte = value >> ((7 - i) * 8);
        }
        segment.put(segOffset, (byte) unsignedByte);
        segOffset++;
    }
}
/**
 * Reads a 2-byte short from the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 */
public static short getShort(MemorySegment[] segments, int offset) {
    // Fast path: both bytes live in the first segment.
    return inFirstSegment(segments, offset, 2)
            ? segments[0].getShort(offset)
            : getShortMultiSegments(segments, offset);
}

private static short getShortMultiSegments(MemorySegment[] segments, int offset) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 2) {
        // The two bytes straddle a segment boundary; assemble them byte by byte.
        return (short) getTwoByteSlowly(segments, segmentSize, whichSegment, localOffset);
    }
    return segments[whichSegment].getShort(localOffset);
}
/**
 * Writes a 2-byte short into the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @param value value to write.
 */
public static void setShort(MemorySegment[] segments, int offset, short value) {
    if (!inFirstSegment(segments, offset, 2)) {
        setShortMultiSegments(segments, offset, value);
        return;
    }
    // Fast path: both bytes live in the first segment.
    segments[0].putShort(offset, value);
}

private static void setShortMultiSegments(MemorySegment[] segments, int offset, short value) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 2) {
        // Straddles a boundary: pass the low byte and the high byte to the slow writer.
        setTwoByteSlowly(segments, segmentSize, whichSegment, localOffset, value, value >> 8);
        return;
    }
    segments[whichSegment].putShort(localOffset, value);
}
/**
 * Reads a 4-byte float from the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 */
public static float getFloat(MemorySegment[] segments, int offset) {
    // Fast path: all four bytes live in the first segment.
    return inFirstSegment(segments, offset, 4)
            ? segments[0].getFloat(offset)
            : getFloatMultiSegments(segments, offset);
}

private static float getFloatMultiSegments(MemorySegment[] segments, int offset) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 4) {
        // Straddles a boundary: read the raw bits byte-wise, then reinterpret as float.
        int rawBits = getIntSlowly(segments, segmentSize, whichSegment, localOffset);
        return Float.intBitsToFloat(rawBits);
    }
    return segments[whichSegment].getFloat(localOffset);
}
/**
 * Writes a 4-byte float into the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @param value value to write.
 */
public static void setFloat(MemorySegment[] segments, int offset, float value) {
    if (!inFirstSegment(segments, offset, 4)) {
        setFloatMultiSegments(segments, offset, value);
        return;
    }
    // Fast path: all four bytes live in the first segment.
    segments[0].putFloat(offset, value);
}

private static void setFloatMultiSegments(MemorySegment[] segments, int offset, float value) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 4) {
        // Straddles a boundary: write the raw bit pattern byte-wise.
        setIntSlowly(
                segments, segmentSize, whichSegment, localOffset, Float.floatToRawIntBits(value));
        return;
    }
    segments[whichSegment].putFloat(localOffset, value);
}
/**
 * Reads an 8-byte double from the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 */
public static double getDouble(MemorySegment[] segments, int offset) {
    // Fast path: all eight bytes live in the first segment.
    return inFirstSegment(segments, offset, 8)
            ? segments[0].getDouble(offset)
            : getDoubleMultiSegments(segments, offset);
}

private static double getDoubleMultiSegments(MemorySegment[] segments, int offset) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 8) {
        // Straddles a boundary: read the raw bits byte-wise, then reinterpret as double.
        long rawBits = getLongSlowly(segments, segmentSize, whichSegment, localOffset);
        return Double.longBitsToDouble(rawBits);
    }
    return segments[whichSegment].getDouble(localOffset);
}
/**
 * Writes an 8-byte double into the given segments.
 *
 * @param segments target segments.
 * @param offset value offset.
 * @param value value to write.
 */
public static void setDouble(MemorySegment[] segments, int offset, double value) {
    if (!inFirstSegment(segments, offset, 8)) {
        setDoubleMultiSegments(segments, offset, value);
        return;
    }
    // Fast path: all eight bytes live in the first segment.
    segments[0].putDouble(offset, value);
}

private static void setDoubleMultiSegments(MemorySegment[] segments, int offset, double value) {
    int segmentSize = segments[0].size();
    int whichSegment = offset / segmentSize;
    int localOffset = offset - whichSegment * segmentSize; // cheaper than %
    if (localOffset > segmentSize - 8) {
        // Straddles a boundary: write the raw bit pattern byte-wise.
        setLongSlowly(
                segments,
                segmentSize,
                whichSegment,
                localOffset,
                Double.doubleToRawLongBits(value));
        return;
    }
    segments[whichSegment].putDouble(localOffset, value);
}
/**
 * Reads two bytes that may straddle a segment boundary and assembles them into the low 16 bits
 * of an int, honoring the platform byte order.
 *
 * @param segments target segments (all of size {@code segSize}).
 * @param segSize size in bytes of each segment.
 * @param segNum index of the segment containing the first byte.
 * @param segOffset offset of the first byte within segment {@code segNum}.
 */
private static int getTwoByteSlowly(
        MemorySegment[] segments, int segSize, int segNum, int segOffset) {
    MemorySegment segment = segments[segNum];
    int ret = 0;
    for (int i = 0; i < 2; i++) {
        // Advance to the next segment once the current one is exhausted.
        if (segOffset == segSize) {
            segment = segments[++segNum];
            segOffset = 0;
        }
        // Mask to avoid sign extension when widening the byte.
        int unsignedByte = segment.get(segOffset) & 0xff;
        // Shift the byte into the position dictated by the platform byte order.
        if (LITTLE_ENDIAN) {
            ret |= (unsignedByte << (i * 8));
        } else {
            ret |= (unsignedByte << ((1 - i) * 8));
        }
        segOffset++;
    }
    return ret;
}
/**
 * Writes two bytes that may straddle a segment boundary. {@code b1} is the low byte and
 * {@code b2} the high byte of the 16-bit value; only the lowest 8 bits of each argument are
 * written, and their physical order follows the platform byte order.
 *
 * @param segments target segments (all of size {@code segSize}).
 * @param segSize size in bytes of each segment.
 * @param segNum index of the segment receiving the first byte.
 * @param segOffset offset of the first byte within segment {@code segNum}.
 * @param b1 low byte of the value (in its low 8 bits).
 * @param b2 high byte of the value (in its low 8 bits).
 */
private static void setTwoByteSlowly(
        MemorySegment[] segments, int segSize, int segNum, int segOffset, int b1, int b2) {
    MemorySegment segment = segments[segNum];
    segment.put(segOffset, (byte) (LITTLE_ENDIAN ? b1 : b2));
    segOffset++;
    // The second byte may fall into the next segment.
    if (segOffset == segSize) {
        segment = segments[++segNum];
        segOffset = 0;
    }
    segment.put(segOffset, (byte) (LITTLE_ENDIAN ? b2 : b1));
}
/**
 * Find equal segments2 in segments1.
 *
 * @param segments1 segs to find.
 * @param offset1 start offset of the search range in {@code segments1}.
 * @param numBytes1 length in bytes of the search range.
 * @param segments2 sub segs.
 * @param offset2 start offset of the pattern in {@code segments2}.
 * @param numBytes2 length in bytes of the pattern.
 * @return Return the found offset, return -1 if not find.
 */
public static int find(
        MemorySegment[] segments1,
        int offset1,
        int numBytes1,
        MemorySegment[] segments2,
        int offset2,
        int numBytes2) {
    // An empty pattern matches at the start of the range.
    if (numBytes2 == 0) { // quick way 1.
        return offset1;
    }
    // Fast path: both ranges fit entirely in their first segments, so plain
    // single-segment comparisons can be used.
    if (inFirstSegment(segments1, offset1, numBytes1)
            && inFirstSegment(segments2, offset2, numBytes2)) {
        byte first = segments2[0].get(offset2);
        // Last start position at which the pattern can still fully fit.
        int end = numBytes1 - numBytes2 + offset1;
        for (int i = offset1; i <= end; i++) {
            // quick way 2: equal first byte.
            if (segments1[0].get(i) == first
                    && segments1[0].equalTo(segments2[0], i, offset2, numBytes2)) {
                return i;
            }
        }
        return -1;
    } else {
        return findInMultiSegments(
                segments1, offset1, numBytes1, segments2, offset2, numBytes2);
    }
}
/**
 * Multi-segment fallback for {@code find}: scans every candidate start position and compares
 * the pattern across segment boundaries.
 *
 * @return the matching offset, or -1 when the pattern does not occur.
 */
private static int findInMultiSegments(
        MemorySegment[] segments1,
        int offset1,
        int numBytes1,
        MemorySegment[] segments2,
        int offset2,
        int numBytes2) {
    // Last start position at which the pattern can still fully fit.
    int lastStart = offset1 + (numBytes1 - numBytes2);
    for (int start = offset1; start <= lastStart; start++) {
        if (equalsMultiSegments(segments1, start, segments2, offset2, numBytes2)) {
            return start;
        }
    }
    return -1;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/business_glossary.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
/**
*
*
* <pre>
* Update GlossaryCategory Request
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest}
*/
public final class UpdateGlossaryCategoryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest)
UpdateGlossaryCategoryRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateGlossaryCategoryRequest.newBuilder() to construct.
private UpdateGlossaryCategoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateGlossaryCategoryRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateGlossaryCategoryRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataplex.v1.BusinessGlossaryProto
.internal_static_google_cloud_dataplex_v1_UpdateGlossaryCategoryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataplex.v1.BusinessGlossaryProto
.internal_static_google_cloud_dataplex_v1_UpdateGlossaryCategoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest.class,
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest.Builder.class);
}
private int bitField0_;
public static final int CATEGORY_FIELD_NUMBER = 1;
private com.google.cloud.dataplex.v1.GlossaryCategory category_;
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the category field is set.
*/
@java.lang.Override
public boolean hasCategory() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The category.
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.GlossaryCategory getCategory() {
return category_ == null
? com.google.cloud.dataplex.v1.GlossaryCategory.getDefaultInstance()
: category_;
}
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.GlossaryCategoryOrBuilder getCategoryOrBuilder() {
return category_ == null
? com.google.cloud.dataplex.v1.GlossaryCategory.getDefaultInstance()
: category_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getCategory());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCategory());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest)) {
return super.equals(obj);
}
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest other =
(com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest) obj;
if (hasCategory() != other.hasCategory()) return false;
if (hasCategory()) {
if (!getCategory().equals(other.getCategory())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCategory()) {
hash = (37 * hash) + CATEGORY_FIELD_NUMBER;
hash = (53 * hash) + getCategory().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Update GlossaryCategory Request
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest)
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataplex.v1.BusinessGlossaryProto
.internal_static_google_cloud_dataplex_v1_UpdateGlossaryCategoryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataplex.v1.BusinessGlossaryProto
.internal_static_google_cloud_dataplex_v1_UpdateGlossaryCategoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest.class,
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest.Builder.class);
}
// Construct using com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCategoryFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
category_ = null;
if (categoryBuilder_ != null) {
categoryBuilder_.dispose();
categoryBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataplex.v1.BusinessGlossaryProto
.internal_static_google_cloud_dataplex_v1_UpdateGlossaryCategoryRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest getDefaultInstanceForType() {
return com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest build() {
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest buildPartial() {
com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest result =
new com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.category_ = categoryBuilder_ == null ? category_ : categoryBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest) {
return mergeFrom((com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest other) {
if (other == com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest.getDefaultInstance())
return this;
if (other.hasCategory()) {
mergeCategory(other.getCategory());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getCategoryFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.dataplex.v1.GlossaryCategory category_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataplex.v1.GlossaryCategory,
com.google.cloud.dataplex.v1.GlossaryCategory.Builder,
com.google.cloud.dataplex.v1.GlossaryCategoryOrBuilder>
categoryBuilder_;
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the category field is set.
*/
public boolean hasCategory() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The category.
*/
public com.google.cloud.dataplex.v1.GlossaryCategory getCategory() {
if (categoryBuilder_ == null) {
return category_ == null
? com.google.cloud.dataplex.v1.GlossaryCategory.getDefaultInstance()
: category_;
} else {
return categoryBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setCategory(com.google.cloud.dataplex.v1.GlossaryCategory value) {
if (categoryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
category_ = value;
} else {
categoryBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setCategory(
        com.google.cloud.dataplex.v1.GlossaryCategory.Builder builderForValue) {
      // The sub-builder is built immediately: later mutations of builderForValue
      // are not reflected in this request.
      if (categoryBuilder_ == null) {
        category_ = builderForValue.build();
      } else {
        categoryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeCategory(com.google.cloud.dataplex.v1.GlossaryCategory value) {
      if (categoryBuilder_ == null) {
        // Merge field-by-field only when a non-default value was already explicitly set;
        // otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && category_ != null
            && category_ != com.google.cloud.dataplex.v1.GlossaryCategory.getDefaultInstance()) {
          getCategoryBuilder().mergeFrom(value);
        } else {
          category_ = value;
        }
      } else {
        categoryBuilder_.mergeFrom(value);
      }
      // Only flag presence (and notify) when a value actually ended up stored.
      if (category_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearCategory() {
      bitField0_ = (bitField0_ & ~0x00000001);  // drop the "explicitly set" bit
      category_ = null;
      if (categoryBuilder_ != null) {
        categoryBuilder_.dispose();  // detach the nested builder from this parent builder
        categoryBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.dataplex.v1.GlossaryCategory.Builder getCategoryBuilder() {
      // Hands out a mutable sub-builder; the field counts as set from this point on.
      bitField0_ |= 0x00000001;
      onChanged();
      return getCategoryFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.dataplex.v1.GlossaryCategoryOrBuilder getCategoryOrBuilder() {
      // Read-only view: prefer the live sub-builder, else the cached message or the default.
      if (categoryBuilder_ != null) {
        return categoryBuilder_.getMessageOrBuilder();
      } else {
        return category_ == null
            ? com.google.cloud.dataplex.v1.GlossaryCategory.getDefaultInstance()
            : category_;
      }
    }
/**
*
*
* <pre>
* Required. The GlossaryCategory to update.
* The GlossaryCategory's `name` field is used to identify the
* GlossaryCategory to update. Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryCategory category = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataplex.v1.GlossaryCategory,
            com.google.cloud.dataplex.v1.GlossaryCategory.Builder,
            com.google.cloud.dataplex.v1.GlossaryCategoryOrBuilder>
        getCategoryFieldBuilder() {
      // Lazily creates the single-field builder. Once created, it owns the current
      // value, so the plain `category_` field is nulled out to avoid two sources of truth.
      if (categoryBuilder_ == null) {
        categoryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataplex.v1.GlossaryCategory,
                com.google.cloud.dataplex.v1.GlossaryCategory.Builder,
                com.google.cloud.dataplex.v1.GlossaryCategoryOrBuilder>(
                getCategory(), getParentForChildren(), isClean());
        category_ = null;
      }
      return categoryBuilder_;
    }
    // Cached value of `update_mask`; null until set (presence tracked by bit 0x00000002).
    private com.google.protobuf.FieldMask updateMask_;
    // Lazily-created nested builder for `update_mask`; when non-null, it owns the value.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
    public boolean hasUpdateMask() {
      // Explicit presence of `update_mask` is tracked by bit 0x00000002 of bitField0_.
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
    public com.google.protobuf.FieldMask getUpdateMask() {
      // Prefer the live nested builder; otherwise return the cached message,
      // substituting the default instance so this never returns null.
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      // null is rejected: use clearUpdateMask() to unset the field.
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;  // mark the field as explicitly set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      // The sub-builder is built immediately: later mutations of builderForValue
      // are not reflected in this request.
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge field-by-field only when a non-default value was already explicitly set;
        // otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      // Only flag presence (and notify) when a value actually ended up stored.
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);  // drop the "explicitly set" bit
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();  // detach the nested builder from this parent builder
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Hands out a mutable sub-builder; the field counts as set from this point on.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      // Read-only view: prefer the live sub-builder, else the cached message or the default.
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the single-field builder. Once created, it owns the current
      // value, so the plain `updateMask_` field is nulled out to avoid two sources of truth.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Unknown-field handling is delegated entirely to the generated superclass.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Unknown-field merging is delegated entirely to the generated superclass.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest)
  // Shared immutable default instance (all fields unset), created eagerly at class load.
  private static final com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest();
  }
  public static com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest getDefaultInstance() {
    // Singleton default: one immutable instance shared by all callers.
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<UpdateGlossaryCategoryRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateGlossaryCategoryRequest>() {
        @java.lang.Override
        public UpdateGlossaryCategoryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far, for diagnostics by the caller.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateGlossaryCategoryRequest> parser() {
    // Shared stateless parser instance.
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateGlossaryCategoryRequest> getParserForType() {
    // Same shared parser as the static parser() accessor.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dataplex.v1.UpdateGlossaryCategoryRequest getDefaultInstanceForType() {
    // Instance-level access to the shared singleton default.
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary (extraction artifact): apache/sis — endorsed/src/org.apache.sis.storage.geotiff/main/org/apache/sis/storage/geotiff/GeoTiffStore.java ====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.storage.geotiff;
import java.util.Set;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.Optional;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.ZoneId;
import java.net.URI;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.awt.image.RenderedImage;
import java.awt.image.RasterFormatException;
import org.opengis.util.NameSpace;
import org.opengis.util.NameFactory;
import org.opengis.util.GenericName;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.maintenance.ScopeCode;
import org.opengis.parameter.ParameterValueGroup;
import org.apache.sis.setup.OptionKey;
import org.apache.sis.storage.Aggregate;
import org.apache.sis.storage.GridCoverageResource;
import org.apache.sis.storage.DataStore;
import org.apache.sis.storage.DataStoreProvider;
import org.apache.sis.storage.StorageConnector;
import org.apache.sis.storage.DataStoreException;
import org.apache.sis.storage.DataStoreClosedException;
import org.apache.sis.storage.ReadOnlyStorageException;
import org.apache.sis.storage.WriteOnlyStorageException;
import org.apache.sis.storage.IncompatibleResourceException;
import org.apache.sis.storage.IllegalNameException;
import org.apache.sis.storage.base.MetadataBuilder;
import org.apache.sis.storage.base.StoreUtilities;
import org.apache.sis.storage.base.URIDataStore;
import org.apache.sis.storage.base.URIDataStoreProvider;
import org.apache.sis.storage.base.GridResourceWrapper;
import org.apache.sis.storage.event.StoreEvent;
import org.apache.sis.storage.event.StoreListener;
import org.apache.sis.storage.event.StoreListeners;
import org.apache.sis.storage.event.WarningEvent;
import org.apache.sis.storage.modifier.CoverageModifier;
import org.apache.sis.io.stream.ChannelDataInput;
import org.apache.sis.io.stream.ChannelDataOutput;
import org.apache.sis.io.stream.IOUtilities;
import org.apache.sis.coverage.SubspaceNotSpecifiedException;
import org.apache.sis.coverage.grid.GridCoverage;
import org.apache.sis.coverage.grid.GridGeometry;
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.SimpleInternationalString;
import org.apache.sis.util.internal.shared.Constants;
import org.apache.sis.util.internal.shared.ListOfUnknownSize;
import org.apache.sis.util.collection.BackingStoreException;
import org.apache.sis.util.collection.TreeTable;
import org.apache.sis.util.iso.DefaultNameFactory;
import org.apache.sis.util.iso.DefaultNameSpace;
import org.apache.sis.util.resources.Errors;
/**
* A data store backed by GeoTIFF files.
*
* @author Rémi Maréchal (Geomatys)
* @author Martin Desruisseaux (Geomatys)
* @author Thi Phuong Hao Nguyen (VNSC)
* @author Alexis Manin (Geomatys)
* @version 1.5
* @since 0.8
*/
public class GeoTiffStore extends DataStore implements Aggregate {
/**
* The encoding of strings in the metadata. The TIFF specification said that is shall be US-ASCII,
* but Apache SIS nevertheless let the user specifies an alternative encoding if needed.
*/
final Charset encoding;
/**
* The GeoTIFF reader implementation, or {@code null} if the store has been closed.
*
* @see #reader()
*/
private volatile Reader reader;
/**
* The GeoTIFF writer implementation, or {@code null} if the store has been closed.
*
* @see #writer()
*/
private volatile Writer writer;
/**
* The compression to apply when writing tiles, or {@code null} if unspecified.
*
* @see #getCompression()
*/
private final Compression compression;
/**
* The locale to use for formatting metadata. This is not necessarily the same as {@link #getLocale()},
* which is about formatting error messages. A null value means "unlocalized", which is usually English.
*/
final Locale dataLocale;
/**
* The timezone for the date and time parsing, or {@code null} for the default.
*/
private final ZoneId timezone;
/**
* The object to use for parsing and formatting dates. Created when first needed.
*
* @see #getDateFormat()
*/
private transient DateFormat dateFormat;
/**
* The {@link GeoTiffStoreProvider#LOCATION} parameter value, or {@code null} if none.
* This is used for information purpose only, not for actual reading operations.
*
* @see #getOpenParameters()
*/
private final URI location;
/**
* Same value as {@link #location} but as a path, or {@code null} if none.
* Stored separately because conversion from path to URI back to path is not
* looseness (relative paths become absolutes).
*
* @todo May become an array later if we want to handle TFW and PRJ file here.
*/
final Path path;
/**
* The factory to use for creating image identifiers.
*/
final NameFactory nameFactory;
/**
* The data store identifier created from the filename, or {@code null} if none.
* Defined as a namespace for use as the scope of children resources (the images).
* This is created when first needed.
*
* <h4>Design note</h4>
* We do not create this field in the constructor because this value can be provided by
* the user-specified {@link #customizer}, which would receive a reference to {@code this}
* before its construction is completed.
*
* @see #namespace()
*/
private NameSpace namespace;
/**
* Whether {@link #namespace} has been determined.
* Note that the resulting namespace may still be null.
*
* @see #namespace()
*/
private boolean isNamespaceSet;
/**
* The metadata, or {@code null} if not yet created.
*
* @see #getMetadata()
*/
private Metadata metadata;
/**
* The native metadata, or {@code null} if not yet created.
*
* @see #getNativeMetadata()
*/
private TreeTable nativeMetadata;
/**
* Description of images in this GeoTIFF files. This collection is created only when first needed.
*
* @see #components()
*/
private Components components;
/**
* Whether this {@code GeotiffStore} will be hidden. If {@code true}, then some metadata that would
* normally be provided in this {@code GeoTiffStore} will be provided by individual components instead.
*/
final boolean hidden;
/**
* The user-specified method for customizing the band definitions. Never {@code null}.
*/
final CoverageModifier customizer;
/**
* Creates a new GeoTIFF store from the given file, URL or stream object.
* This constructor invokes {@link StorageConnector#closeAllExcept(Object)},
* keeping open only the needed resource.
*
* @param provider the factory that created this {@code DataStore} instance, or {@code null} if unspecified.
* @param connector information about the storage (URL, stream, <i>etc</i>).
* @throws DataStoreException if an error occurred while opening the GeoTIFF file.
*
* @since 1.5
*/
    public GeoTiffStore(final DataStoreProvider provider, final StorageConnector connector) throws DataStoreException {
        // No parent store, and `hidden = false`: this store is directly accessible.
        this(null, provider, connector, false);
    }
/**
* Creates a new GeoTIFF store as a component of a larger data store.
* If the {@code hidden} parameter is {@code true}, some metadata that would normally be
* provided in this {@code GeoTiffStore} will be provided by individual components instead.
*
* <h4>Example</h4>
* A Landsat data set is a collection of files in a directory or ZIP file,
* which includes more than 10 GeoTIFF files (one image per band or product for a scene).
* {@link org.apache.sis.storage.landsat.LandsatStore} is a data store opening the Landsat
* metadata file as the main file, then opening each band/product using a GeoTIFF data store.
* Those bands/products are components of the Landsat data store.
*
* @param parent the parent that contains this new GeoTIFF store component, or {@code null} if none.
* @param provider the factory that created this {@code DataStore} instance, or {@code null} if unspecified.
* @param connector information about the storage (URL, stream, <i>etc</i>).
* @param hidden {@code true} if this GeoTIFF store will not be directly accessible from the parent.
* It is the case if the parent store will expose only some {@linkplain #components()
* components} instead of the GeoTIFF store itself.
* @throws DataStoreException if an error occurred while opening the GeoTIFF file.
*
* @since 1.1
*/
    @SuppressWarnings("this-escape") // `this` appears in a cyclic graph.
    public GeoTiffStore(final DataStore parent, final DataStoreProvider provider, final StorageConnector connector,
                        final boolean hidden) throws DataStoreException
    {
        super(parent, provider, connector, hidden);
        this.hidden = hidden;
        nameFactory = DefaultNameFactory.provider();
        customizer = CoverageModifier.getOrDefault(connector);
        @SuppressWarnings("LocalVariableHidesMemberVariable")
        final Charset encoding = connector.getOption(OptionKey.ENCODING);
        // TIFF strings are US-ASCII by specification, but the user may override the encoding.
        this.encoding = (encoding != null) ? encoding : StandardCharsets.US_ASCII;
        compression = connector.getOption(Compression.OPTION_KEY);
        dataLocale = connector.getOption(OptionKey.LOCALE);
        timezone = connector.getOption(OptionKey.TIMEZONE);
        // Kept for information purpose (getOpenParameters, identifiers); may be null if the
        // storage is not expressible as a URI or path (e.g. a raw channel or stream).
        location = connector.getStorageAs(URI.class);
        path = connector.getStorageAs(Path.class);
        try {
            // Open exactly one of reader/writer; `commit`/`openAndSetNativeByteOrder`
            // close all other resources held by the connector.
            if (URIDataStoreProvider.isWritable(connector, true)) {
                ChannelDataOutput output = URIDataStoreProvider.openAndSetNativeByteOrder(connector, Constants.GEOTIFF);
                writer = new Writer(this, output, connector.getOption(FormatModifier.OPTION_KEY));
            } else {
                ChannelDataInput input = connector.commit(ChannelDataInput.class, Constants.GEOTIFF);
                reader = new Reader(this, input);
            }
        } catch (IOException e) {
            throw new DataStoreException(e);
        }
    }
/**
* Returns the namespace to use in component identifiers, or {@code null} if none.
* This method must be invoked inside a block synchronized on {@code this}.
*
* @throws DataStoreException if an error occurred while computing an identifier.
*/
    private NameSpace namespace() throws DataStoreException {
        assert Thread.holdsLock(this);
        // Computed lazily, at most once; the result (possibly null) is cached and the
        // `isNamespaceSet` flag records that the computation happened. Skipped while the
        // store is closed (both reader and writer null) so a later open could still set it.
        if (!isNamespaceSet && (reader != null || writer != null)) {
            GenericName name = null;
            /*
             * We test `location != null` because if the location was not convertible to URI,
             * then the string representation is probably a class name, which is not useful.
             */
            if (location != null) {
                String filename = (reader != null ? reader.input : writer.output).filename;
                filename = IOUtilities.filenameWithoutExtension(filename);
                name = nameFactory.createLocalName(null, filename);
            }
            // The user-supplied customizer may replace, keep or null-out the proposed name.
            name = customizer.customize(new CoverageModifier.Source(this), name);
            if (name != null) {
                namespace = nameFactory.createNameSpace(name, null);
            }
            isNamespaceSet = true;
        }
        return namespace;
    }
/**
* Creates a name in the namespace of this store.
* This method must be invoked inside a block synchronized on {@code this}.
*
* @param tip the tip of the name to create.
* @return a name in the scope of this store.
* @throws DataStoreException if an error occurred while computing an identifier.
*/
    final GenericName createLocalName(final String tip) throws DataStoreException {
        // The namespace (derived from the filename, possibly null) scopes each image name.
        return nameFactory.createLocalName(namespace(), tip);
    }
/**
* Opens access to listeners for {@link ImageFileDirectory}.
*/
    final StoreListeners listeners() {
        // Package-private accessor so that `ImageFileDirectory` can emit events/warnings.
        return listeners;
    }
/**
* Returns the parameters used to open this GeoTIFF data store.
* The parameters are described by {@link GeoTiffStoreProvider#getOpenParameters()} and contains at least
* a parameter named {@value org.apache.sis.storage.DataStoreProvider#LOCATION} with a {@link URI} value.
* The return value may be empty if the storage input cannot be described by a URI
* (for example a GeoTIFF file reading directly from a {@link java.nio.channels.ReadableByteChannel}).
*
* @return parameters used for opening this data store.
*/
@Override
public Optional<ParameterValueGroup> getOpenParameters() {
final ParameterValueGroup param = URIDataStore.parameters(provider, location);
if (param != null) {
final Writer w = writer;
if (w != null) {
final Set<FormatModifier> modifiers = w.getModifiers();
if (!modifiers.isEmpty()) {
param.parameter(GeoTiffStoreProvider.MODIFIERS).setValue(modifiers.toArray(FormatModifier[]::new));
}
if (compression != null) {
param.parameter(GeoTiffStoreProvider.COMPRESSION).setValue(compression);
}
}
}
return Optional.ofNullable(param);
}
/**
* Returns the modifiers (BigTIFF, COG…) of this data store.
*
* @return format modifiers of this data store.
*
* @since 1.5
*/
public Set<FormatModifier> getModifiers() {
final Writer w = writer; if (w != null) return w.getModifiers();
final Reader r = reader; if (r != null) return r.getModifiers();
return Set.of();
}
/**
* Returns the compression used when writing tiles.
* This is not necessarily the compression of images to be read.
* For the compression of existing images, see {@linkplain #getMetadata() the metadata}.
*
* @return the compression to use for writing new images, or empty if unspecified.
*
* @since 1.5
*/
public Optional<Compression> getCompression() {
return Optional.ofNullable(compression);
}
/**
* Returns an identifier constructed from the name of the TIFF file.
* An identifier is available only if the storage input specified at construction time was something convertible to
* {@link java.net.URI}, for example an {@link java.net.URL}, {@link java.io.File} or {@link java.nio.file.Path}.
*
* @return the identifier derived from the filename.
* @throws DataStoreException if an error occurred while fetching the identifier.
*
* @since 1.0
*/
    @Override
    public Optional<GenericName> getIdentifier() throws DataStoreException {
        @SuppressWarnings("LocalVariableHidesMemberVariable")
        final NameSpace namespace;
        // `namespace()` requires the store lock; copy the reference inside the block
        // so the Optional is built outside the synchronized section.
        synchronized (this) {
            namespace = namespace();
        }
        return (namespace != null) ? Optional.of(namespace.name()) : Optional.empty();
    }
/**
* Sets the {@code metadata/identificationInfo/resourceFormat} node to "GeoTIFF" format.
*/
    final void setFormatInfo(final MetadataBuilder builder) {
        // Declares the GeoTIFF format, the reader implementation, the metadata language
        // and character encoding, and the "coverage" resource scope.
        builder.setPredefinedFormat(Constants.GEOTIFF, listeners, true);
        builder.addFormatReaderSIS(Constants.GEOTIFF);
        builder.addLanguage(Locale.ENGLISH, encoding, MetadataBuilder.Scope.METADATA);
        builder.addResourceScope(ScopeCode.valueOf("COVERAGE"), null);
    }
/**
* Returns information about the dataset as a whole. The returned metadata object can contain information
* such as the spatiotemporal extent of the dataset, contact information about the creator or distributor,
* data quality, usage constraints and more.
*
* @return information about the dataset.
* @throws DataStoreException if an error occurred while reading the data.
*/
    @Override
    public synchronized Metadata getMetadata() throws DataStoreException {
        // Built lazily and cached; subsequent calls return the same Metadata instance.
        if (metadata == null) {
            @SuppressWarnings("LocalVariableHidesMemberVariable")
            final Reader reader = reader();
            final var builder = new MetadataBuilder();
            setFormatInfo(builder);
            int n = 0;
            try {
                // Aggregate the metadata of every image until getImage returns null.
                GridCoverageResource dir;
                while ((dir = reader.getImage(n++)) != null) {
                    builder.addFromComponent(dir.getMetadata());
                }
            } catch (IOException e) {
                throw errorIO(e);
            } catch (ArithmeticException e) {
                // NOTE(review): presumably an overflow while computing file offsets or sizes;
                // reported as a warning so partial metadata is still returned — confirm intent.
                listeners.warning(e);
            }
            /*
             * Add the filename as an identifier only if the input was something convertible to URI (URL, File or Path),
             * otherwise reader.input.filename may not be useful; it may be just the InputStream classname. If the TIFF
             * file did not specify any ImageDescription tag, then we will add the filename as a title instead of an
             * identifier because the title is mandatory in ISO 19115 metadata.
             */
            getIdentifier().ifPresent((id) -> {
                builder.addIdentifier(id, MetadataBuilder.Scope.ALL);
                // Replace the `ResourceInternationalString` for "Image 1".
                if (!(builder.getTitle() instanceof SimpleInternationalString)) {
                    builder.setTitle(id.toString());
                }
            });
            builder.setISOStandards(true);
            // Give the user-supplied customizer the final word on the metadata content.
            metadata = customizer.customize(new CoverageModifier.Source(this), builder.build());
        }
        return metadata;
    }
/**
* Returns TIFF tags and GeoTIFF keys as a tree for debugging purpose.
* The tags and keys appear in the order they are declared in the file.
* The columns are tag numerical code as an {@link Integer},
* tag name as a {@link String} and value as an {@link Object}.
*
* <p>This method should not be invoked during normal operations;
* the {@linkplain #getMetadata() standard metadata} are preferred
* because they allow abstraction of data format details.
* Native metadata should be used only when an information does not appear in standard metadata,
* or for debugging purposes.</p>
*
* <h4>Performance note</h4>
* Since this method should not be invoked in normal operations, it has not been tuned for performance.
* Invoking this method may cause a lot of {@linkplain java.nio.channels.SeekableByteChannel#position(long)
* seek operations}.
*
* @return resources information structured in an implementation-specific way.
* @throws DataStoreException if an error occurred while reading the metadata.
*
* @since 1.2
*/
    @Override
    public synchronized Optional<TreeTable> getNativeMetadata() throws DataStoreException {
        // Built lazily and cached; requires the reader (throws if the store is write-only or closed).
        if (nativeMetadata == null) try {
            nativeMetadata = new NativeMetadata(getLocale()).read(reader());
        } catch (IOException e) {
            throw errorIO(e);
        }
        return Optional.of(nativeMetadata);
    }
/**
* Returns the paths to the files used by this GeoTIFF store.
* The fileset contains the path of the file given at construction time.
*
* @return files used by this resource, or an empty value if unknown.
* @throws DataStoreException if an error occurred while preparing the set of files.
*
* @since 1.5
*/
@Override
public Optional<FileSet> getFileSet() throws DataStoreException {
return (path != null) ? Optional.of(new FileSet(path)) : Optional.empty();
}
/**
* {@return the object to use for parsing and formatting dates}.
*/
    final DateFormat getDateFormat() {
        // Lazily created. The pattern is the TIFF DateTime tag layout ("YYYY:MM:DD HH:MM:SS").
        // NOTE(review): SimpleDateFormat is not thread-safe and the lazy init is unsynchronized;
        // callers are presumably expected to hold the store lock — TODO confirm.
        if (dateFormat == null) {
            dateFormat = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss", Locale.US);
            if (timezone != null) {
                // Apply the user-specified timezone; otherwise keep the platform default.
                dateFormat.setTimeZone(TimeZone.getTimeZone(timezone));
            }
        }
        return dateFormat;
    }
/**
* Returns the reader if it is not closed, or throws an exception otherwise.
*
* @return the reader, potentially created when first needed.
* @throws WriteOnlyStorageException if the channel is write-only.
*
* @see #close()
*/
    private Reader reader() throws DataStoreException {
        assert Thread.holdsLock(this);
        final Reader r = reader;
        if (r == null) {
            // Distinguish "opened write-only" from "closed" for a more precise exception.
            if (writer != null) {
                throw new WriteOnlyStorageException(readOrWriteOnly(1));
            }
            throw new DataStoreClosedException(getLocale(), Constants.GEOTIFF, StandardOpenOption.READ);
        }
        return r;
    }
/**
* Returns the writer if it can be created and is not closed, or throws an exception otherwise.
* If there is no writer but a reader exists, then a writer is created for writing past the last image.
* After the write operation has been completed, it is caller responsibility to invoke the following code:
*
* {@snippet lang="java":
* writer.synchronize(reader, false);
* // Write the image
* writer.flush();
* writer.synchronize(reader, true);
* }
*
* @return the writer, potentially created when first needed.
* @throws ReadOnlyStorageException if this data store is read-only.
*
* @see #close()
* @see Writer#synchronize(Reader, boolean)
*/
    private Writer writer() throws DataStoreException, IOException {
        assert Thread.holdsLock(this);
        final Reader r = reader;
        Writer w = writer;
        if (w == null) {
            // No writer yet: if a reader exists, create a writer positioned for
            // appending after the last existing image; otherwise the store is closed.
            if (r == null) {
                throw new DataStoreClosedException(getLocale(), Constants.GEOTIFF, StandardOpenOption.WRITE);
            }
            writer = w = new Writer(r);
        } else if (r != null) {
            // Reader and writer coexist: make sure new images go after the existing ones.
            w.moveAfterExisting(r);
        }
        return w;
    }
/**
* Returns descriptions of all images in this GeoTIFF file.
* Images are not immediately loaded.
*
* <p>If an error occurs during iteration in the returned collection,
* an unchecked {@link BackingStoreException} will be thrown with a {@link DataStoreException} as its cause.</p>
*
* @return descriptions of all images in this GeoTIFF file.
* @throws DataStoreException if an error occurred while fetching the image descriptions.
*
* @since 1.0
*/
    @Override
    @SuppressWarnings("ReturnOfCollectionOrArrayField")
    public synchronized List<GridCoverageResource> components() throws DataStoreException {
        // Lazily created; the list itself loads images on demand and may throw
        // BackingStoreException (wrapping DataStoreException) during iteration.
        if (components == null) {
            components = new Components();
        }
        return components;
    }
/**
* The components returned by {@link #components}. Defined as a named class instead of an anonymous
* class for more readable stack trace. This is especially useful since {@link BackingStoreException}
* may happen in any method.
*/
    private final class Components extends ListOfUnknownSize<GridCoverageResource> {
        /** The collection size, cached when first computed. Guarded by the store lock. */
        private int size = -1;

        /** Creates a new list of components. */
        Components() {
        }

        /** Returns the size or -1 if not yet known. */
        @Override protected int sizeIfKnown() {
            synchronized (GeoTiffStore.this) {
                return size;
            }
        }

        /** Returns the size, computing and caching it if needed. */
        @Override public int size() {
            synchronized (GeoTiffStore.this) {
                if (size < 0) {
                    // Delegates to the superclass, which probes elements until `exists` fails.
                    size = super.size();
                }
                return size;
            }
        }

        /** Increments the size by the given number of images. */
        final void incrementSize(final int n) {
            synchronized (GeoTiffStore.this) {
                // Only adjust when the size has already been computed; otherwise it
                // will be computed from scratch (including the new images) on demand.
                if (size >= 0) {
                    size += n;
                }
            }
        }

        /** Returns whether the given index is valid. */
        @Override protected boolean exists(final int index) {
            return (index >= 0) && getImageFileDirectory(index) != null;
        }

        /** Returns element at the given index or throw {@link IndexOutOfBoundsException}. */
        @Override public GridCoverageResource get(final int index) {
            if (index >= 0) {
                GridCoverageResource image = getImageFileDirectory(index);
                if (image != null) return image;
            }
            throw new IndexOutOfBoundsException(errors().getString(Errors.Keys.IndexOutOfBounds_1, index));
        }

        /** Returns element at the given index or returns {@code null} if the index is invalid. */
        private GridCoverageResource getImageFileDirectory(final int index) {
            try {
                synchronized (GeoTiffStore.this) {
                    return reader().getImage(index);
                }
            } catch (IOException e) {
                // List methods cannot throw checked exceptions: wrap in an unchecked carrier.
                throw new BackingStoreException(errorIO(e));
            } catch (DataStoreException e) {
                throw new BackingStoreException(e);
            }
        }
    }
    /**
     * Returns the image at the given index. Images numbering starts at 1.
     * If the given string has a scope (e.g. {@code "filename:1"}), then the scope must match
     * the namespace of this data store; the verification is done by {@code parseImageIndex(String)}.
     *
     * @param sequence string representation of the image index, starting at 1.
     * @return image at the given index.
     * @throws DataStoreException if the requested image cannot be obtained.
     */
    @Override
    public synchronized GridCoverageResource findResource(final String sequence) throws DataStoreException {
        ArgumentChecks.ensureNonEmpty("sequence", sequence);
        final int index = parseImageIndex(sequence);
        if (index >= 0) try {
            // User-visible indices start at 1, but the reader numbers images from 0.
            final GridCoverageResource image = reader().getImage(index - 1);
            if (image != null) return image;
        } catch (IOException e) {
            throw errorIO(e);
        }
        throw new IllegalNameException(StoreUtilities.resourceNotFound(this, sequence));
    }
    /**
     * Validates input resource name and extracts the image index it should contain.
     * The resource name may be of the form "1" or "filename:1". We verify that:
     *
     * <ul>
     *   <li>Input tip (last name part) is a parsable integer.</li>
     *   <li>If input provides more than a tip, all text before the tip matches this datastore namespace
     *       (should be the name of the GeoTIFF file without its extension).</li>
     * </ul>
     *
     * @param sequence a string representing the name of a resource present in this datastore.
     * @return the index of the GeoTIFF image matching the requested resource.
     *         There is no verification that the returned index is valid.
     * @throws IllegalNameException if the argument use an invalid namespace or if the tip is not an integer.
     * @throws DataStoreException if an exception occurred while computing an identifier.
     */
    private int parseImageIndex(String sequence) throws DataStoreException {
        @SuppressWarnings("LocalVariableHidesMemberVariable")
        final NameSpace namespace = namespace();
        final String separator = DefaultNameSpace.getSeparator(namespace, false);
        final int s = sequence.lastIndexOf(separator);
        if (s >= 0) {
            // A scope is present: it must match this store namespace (when one is defined).
            if (namespace != null) {
                final String expected = namespace.name().toString();
                if (!sequence.substring(0, s).equals(expected)) {
                    throw new IllegalNameException(errors().getString(Errors.Keys.UnexpectedNamespace_2, expected, sequence));
                }
            }
            // Keep only the tip (the part after the last separator).
            sequence = sequence.substring(s + separator.length());
        }
        try {
            return Integer.parseInt(sequence);
        } catch (NumberFormatException e) {
            throw new IllegalNameException(StoreUtilities.resourceNotFound(this, sequence), e);
        }
    }
    /**
     * Encodes the given image in the GeoTIFF file.
     * The image is appended after any existing images in the GeoTIFF file.
     * This method does not handle pyramids such as Cloud Optimized GeoTIFF (COG).
     *
     * @param image the image to encode.
     * @param grid mapping from pixel coordinates to "real world" coordinates, or {@code null} if none.
     * @param metadata title, author and other information, or {@code null} if none.
     * @return the effectively added resource. Using this resource may cause data to be reloaded.
     * @throws ReadOnlyStorageException if this data store is read-only.
     * @throws IncompatibleResourceException if the given {@code image} has a property which is not supported by this writer.
     * @throws DataStoreException if an error occurred while writing to the output stream.
     *
     * @since 1.5
     */
    public synchronized GridCoverageResource append(final RenderedImage image, final GridGeometry grid, final Metadata metadata)
            throws DataStoreException
    {
        final int index;
        try {
            @SuppressWarnings("LocalVariableHidesMemberVariable") final Writer writer = writer();
            @SuppressWarnings("LocalVariableHidesMemberVariable") final Reader reader = this.reader;
            // Synchronization with the reader is released for the duration of the write,
            // then restored in the `finally` block (see `Writer.synchronize(Reader, boolean)`).
            writer.synchronize(reader, false);
            final long offsetIFD;
            try {
                offsetIFD = writer.append(image, grid, metadata);
            } finally {
                writer.synchronize(reader, true);
            }
            if (reader != null) {
                // Let the reader know where the new Image File Directory has been written.
                reader.offsetOfWrittenIFD(offsetIFD);
            }
            index = writer.imageIndex++;
        } catch (RasterFormatException | ArithmeticException | IllegalArgumentException e) {
            // These exceptions denote a property of the image which cannot be encoded.
            throw new IncompatibleResourceException(cannotWrite(), e).addAspect("raster");
        } catch (RuntimeException | IOException e) {
            throw new DataStoreException(cannotWrite(), e);
        }
        if (components != null) {
            // Keep the cached size of the `components()` list consistent with the new image.
            components.incrementSize(1);
        }
        /*
         * Returns a thin wrapper with only a reference to this store and the image index.
         * The actual loading of the effectively added resource will be done only if requested.
         */
        return new GridResourceWrapper() {
            /** The lock to use for synchronization purposes. */
            @Override protected Object getSynchronizationLock() {
                return GeoTiffStore.this;
            }
            /** Loads the effectively added resource when first requested. */
            @Override protected GridCoverageResource createSource() throws DataStoreException {
                try {
                    synchronized (GeoTiffStore.this) {
                        return reader().getImage(index);
                    }
                } catch (IOException e) {
                    throw new DataStoreException(errorIO(e));
                }
            }
        };
    }
/**
* Adds a new grid coverage in the GeoTIFF file.
* The coverage is appended after any existing images in the GeoTIFF file.
* This method does not handle pyramids such as Cloud Optimized GeoTIFF (COG).
*
* @param coverage the grid coverage to encode.
* @param metadata title, author and other information, or {@code null} if none.
* @return the effectively added resource. Using this resource may cause data to be reloaded.
* @throws SubspaceNotSpecifiedException if the given grid coverage is not a two-dimensional slice.
* @throws ReadOnlyStorageException if this data store is read-only.
* @throws DataStoreException if the given {@code image} has a property which is not supported by this writer,
* or if an error occurred while writing to the output stream.
*
* @since 1.5
*/
public GridCoverageResource append(final GridCoverage coverage, final Metadata metadata) throws DataStoreException {
return append(coverage.render(null), coverage.getGridGeometry(), metadata);
}
/**
* Registers a listener to notify when the specified kind of event occurs in this data store.
* The current implementation of this data store can emit only {@link WarningEvent}s;
* any listener specified for another kind of events will be ignored.
*/
@Override
public <T extends StoreEvent> void addListener(Class<T> eventType, StoreListener<? super T> listener) {
// If an argument is null, we let the parent class throws (indirectly) NullPointerException.
if (listener == null || eventType == null || eventType.isAssignableFrom(WarningEvent.class)) {
super.addListener(eventType, listener);
}
}
    /**
     * Returns the bundle of localized error messages for the current locale of this data store.
     */
    private Errors errors() {
        return Errors.forLocale(getLocale());
    }
/**
* Returns the exception to throw when an I/O error occurred.
* This method wraps the exception with a {@literal "Cannot read <filename>"} message.
*/
final DataStoreException errorIO(final IOException e) {
return new DataStoreException(errors().getString(Errors.Keys.CanNotRead_1, getDisplayName()), e);
}
/**
* Returns the error message for a file that cannot be written.
*/
private String cannotWrite() {
return errors().getString(Errors.Keys.CanNotWriteFile_2, Constants.GEOTIFF, getDisplayName());
}
/**
* Returns a localized error message saying that this data store has been opened in read-only or write-only mode.
*
* @param mode 0 for read-only, or 1 for write-only.
* @return localized error message.
*/
final String readOrWriteOnly(final int mode) {
return errors().getString(Errors.Keys.OpenedReadOrWriteOnly_2, mode, getDisplayName());
}
/**
* Closes this GeoTIFF store and releases any underlying resources.
* This method can be invoked asynchronously for interrupting a long reading process.
*
* @throws DataStoreException if an error occurred while closing the GeoTIFF file.
*/
@Override
public void close() throws DataStoreException {
try {
listeners.close(); // Should never fail.
final Reader r = reader;
final Writer w = writer;
if (w != null) w.close();
if (r != null) r.close();
} catch (IOException e) {
throw new DataStoreException(e);
} finally {
synchronized (this) {
components = null;
namespace = null;
metadata = null;
nativeMetadata = null;
reader = null;
writer = null;
}
}
}
}
|
google/closure-templates | 35,888 | java/src/com/google/template/soy/pysrc/internal/GenPyCodeVisitor.java | /*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.pysrc.internal;
import static java.util.stream.Collectors.toCollection;
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.template.soy.error.ErrorReporter;
import com.google.template.soy.error.SoyErrorKind;
import com.google.template.soy.exprtree.ExprNode;
import com.google.template.soy.internal.i18n.SoyBidiUtils;
import com.google.template.soy.pysrc.SoyPySrcOptions;
import com.google.template.soy.pysrc.internal.GenPyExprsVisitor.GenPyExprsVisitorFactory;
import com.google.template.soy.pysrc.restricted.PyExpr;
import com.google.template.soy.pysrc.restricted.PyExprUtils;
import com.google.template.soy.pysrc.restricted.PyFunctionExprBuilder;
import com.google.template.soy.soytree.AbstractSoyNodeVisitor;
import com.google.template.soy.soytree.CallDelegateNode;
import com.google.template.soy.soytree.CallNode;
import com.google.template.soy.soytree.CallParamContentNode;
import com.google.template.soy.soytree.CallParamNode;
import com.google.template.soy.soytree.ConstNode;
import com.google.template.soy.soytree.DebuggerNode;
import com.google.template.soy.soytree.EvalNode;
import com.google.template.soy.soytree.FileSetMetadata;
import com.google.template.soy.soytree.ForNode;
import com.google.template.soy.soytree.ForNonemptyNode;
import com.google.template.soy.soytree.IfCondNode;
import com.google.template.soy.soytree.IfElseNode;
import com.google.template.soy.soytree.IfNode;
import com.google.template.soy.soytree.ImportNode;
import com.google.template.soy.soytree.ImportNode.ImportType;
import com.google.template.soy.soytree.KeyNode;
import com.google.template.soy.soytree.LetContentNode;
import com.google.template.soy.soytree.LetValueNode;
import com.google.template.soy.soytree.LogNode;
import com.google.template.soy.soytree.PrintNode;
import com.google.template.soy.soytree.SoyFileNode;
import com.google.template.soy.soytree.SoyFileSetNode;
import com.google.template.soy.soytree.SoyNode;
import com.google.template.soy.soytree.SoyNode.ConditionalBlockNode;
import com.google.template.soy.soytree.SoyNode.ParentSoyNode;
import com.google.template.soy.soytree.SoyTreeUtils;
import com.google.template.soy.soytree.SwitchCaseNode;
import com.google.template.soy.soytree.SwitchDefaultNode;
import com.google.template.soy.soytree.SwitchNode;
import com.google.template.soy.soytree.TemplateBasicNode;
import com.google.template.soy.soytree.TemplateDelegateNode;
import com.google.template.soy.soytree.TemplateNode;
import com.google.template.soy.soytree.VeLogNode;
import com.google.template.soy.soytree.defn.SymbolVar.SymbolKind;
import com.google.template.soy.soytree.defn.TemplateParam;
import com.google.template.soy.types.ast.TypeNode;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Visitor for generating full Python code (i.e. statements) for parse tree nodes.
*
* <p>{@link #gen} should be called on a full parse tree. Python source code will be generated for
* all the Soy files. The return value is a list of strings, each string being the content of one
* generated Python file (corresponding to one Soy file).
*/
final class GenPyCodeVisitor extends AbstractSoyNodeVisitor<List<String>> {
  /** The options configuration for this run. */
  private final SoyPySrcOptions pySrcOptions;
  /** The namespace manifest for all current and dependent sources, merged in the constructor. */
  private final ImmutableMap<String, String> namespaceManifest;
  /** Metadata about the whole file set, used to resolve namespaces of imported files. */
  private final FileSetMetadata fileSetMetadata;
  /** Builder accumulating the Python code of the file currently being generated. */
  @VisibleForTesting PyCodeBuilder pyCodeBuilder;
  /** Decides whether a node can be rendered as a single Python expression. */
  private final IsComputableAsPyExprVisitor isComputableAsPyExprVisitor;
  /** Factory for per-file expression visitors. */
  @VisibleForTesting final GenPyExprsVisitorFactory genPyExprsVisitorFactory;
  /** Expression visitor for the file currently being generated. */
  @VisibleForTesting GenPyExprsVisitor genPyExprsVisitor;
  /** Generates the Python expression for template calls. */
  private final GenPyCallExprVisitor genPyCallExprVisitor;
  /** Factory for plugin (custom function) values. */
  private final PythonValueFactoryImpl pluginValueFactory;
  /**
   * Stack of local variable translations, scoped per frame.
   *
   * @see LocalVariableStack
   */
  @VisibleForTesting LocalVariableStack localVarExprs;
  /** Error reported when a deltemplate is encountered: not supported by the Python backend. */
  private static final SoyErrorKind DELEGATE_TEMPLATES_UNSUPPORTED =
      SoyErrorKind.of("Deltemplates are not supported in python.");
  /** Error reported when a modifiable template is encountered: not supported by the Python backend. */
  private static final SoyErrorKind MODIFIABLE_TEMPLATES_UNSUPPORTED =
      SoyErrorKind.of("Modifiable templates are not supported in python.");
  /**
   * Creates a new code-generating visitor.
   *
   * @param pySrcOptions options configuration for this run.
   * @param currentManifest namespace manifest of the sources being compiled; merged with the
   *     manifest supplied in {@code pySrcOptions}.
   * @param fileSetMetadata metadata about the whole file set.
   * @param isComputableAsPyExprVisitor decides whether a node is expressible as a single PyExpr.
   * @param genPyExprsVisitorFactory factory for per-file expression visitors.
   * @param genPyCallExprVisitor generates Python expressions for template calls.
   * @param pluginValueFactory factory for plugin (custom function) values.
   */
  GenPyCodeVisitor(
      SoyPySrcOptions pySrcOptions,
      ImmutableMap<String, String> currentManifest,
      FileSetMetadata fileSetMetadata,
      IsComputableAsPyExprVisitor isComputableAsPyExprVisitor,
      GenPyExprsVisitorFactory genPyExprsVisitorFactory,
      GenPyCallExprVisitor genPyCallExprVisitor,
      PythonValueFactoryImpl pluginValueFactory) {
    this.pySrcOptions = pySrcOptions;
    this.fileSetMetadata = fileSetMetadata;
    this.isComputableAsPyExprVisitor = isComputableAsPyExprVisitor;
    this.genPyExprsVisitorFactory = genPyExprsVisitorFactory;
    this.genPyCallExprVisitor = genPyCallExprVisitor;
    this.pluginValueFactory = pluginValueFactory;
    // Merged manifest: options-level entries first, then the current compilation's entries.
    // buildOrThrow() fails on duplicate keys.
    this.namespaceManifest =
        new ImmutableMap.Builder<String, String>()
            .putAll(pySrcOptions.getNamespaceManifest())
            .putAll(currentManifest)
            .buildOrThrow();
  }
  /**
   * Generates Python source for all Soy files under the given file set node.
   *
   * @param node root of the parse tree.
   * @param errorReporter collector for compilation errors.
   * @return the generated Python file contents, one string per Soy file.
   */
  public List<String> gen(SoyFileSetNode node, ErrorReporter errorReporter) {
    // All these fields should move into Impl but are currently exposed for tests.
    // Reset them so that state does not leak between runs.
    pyCodeBuilder = null;
    genPyExprsVisitor = null;
    localVarExprs = null;
    return new Impl(errorReporter).exec(node);
  }
  /** Visits a single node with a fresh {@link Impl}; exposed only for unit tests. */
  @VisibleForTesting
  void visitForTesting(SoyNode node, ErrorReporter errorReporter) {
    new Impl(errorReporter).exec(node);
  }
private final class Impl extends AbstractSoyNodeVisitor<List<String>> {
    /** The contents of the generated Python files, one entry per Soy file; created in {@link #exec}. */
    private List<String> pyFilesContents;
    /** Destination for errors reported during code generation. */
    final ErrorReporter errorReporter;
    /** Creates an implementation visitor reporting errors to the given reporter. */
    Impl(ErrorReporter reporter) {
      this.errorReporter = reporter;
    }
@Override
public List<String> exec(SoyNode node) {
pyFilesContents = new ArrayList<>();
visit(node);
return pyFilesContents;
}
/**
* Visit all the children of a provided node and combine the results into one expression where
* possible. This will let us avoid some {@code output.append} calls and save a bit of time.
*/
@Override
protected void visitChildren(ParentSoyNode<?> node) {
// If a conditional block is empty, add 'pass' to prevent indentation errors.
if (node.numChildren() == 0 && node instanceof ConditionalBlockNode) {
pyCodeBuilder.appendLine("pass");
return;
}
// If the first child cannot be written as an expression, we need to init the output variable
// first or face potential scoping issues with the output variable being initialized too late.
if (node.numChildren() > 0 && !isComputableAsPyExprVisitor.exec(node.getChild(0))) {
pyCodeBuilder.initOutputVarIfNecessary();
}
List<PyExpr> childPyExprs = new ArrayList<>();
for (SoyNode child : node.getChildren()) {
if (isComputableAsPyExprVisitor.exec(child)) {
childPyExprs.addAll(genPyExprsVisitor.exec(child));
} else {
// We've reached a child that is not computable as a Python expression.
// First add the PyExprs from preceding consecutive siblings that are computable as Python
// expressions (if any).
if (!childPyExprs.isEmpty()) {
pyCodeBuilder.addToOutputVar(childPyExprs);
childPyExprs.clear();
}
// Now append the code for this child.
visit(child);
}
}
// Add the PyExprs from the last few children (if any).
if (!childPyExprs.isEmpty()) {
pyCodeBuilder.addToOutputVar(childPyExprs);
childPyExprs.clear();
}
}
// ---------------------------------------------------------------------------------------------
// Implementations for specific nodes.
@Override
protected void visitSoyFileSetNode(SoyFileSetNode node) {
for (SoyFileNode soyFile : node.getChildren()) {
visit(soyFile);
}
}
    /**
     * Visit a SoyFileNode and generate it's Python output.
     *
     * <p>This visitor generates the necessary imports and configuration needed for all Python
     * output files. This includes imports of runtime libraries, external templates called from
     * within this file, and namespacing configuration.
     *
     * <p>Template generation is deferred to other visitors.
     *
     * <p>Example Output:
     *
     * <pre>
     * # coding=utf-8
     * """ This file was automatically generated from my-templates.soy.
     * Please don't edit this file by hand.
     * """
     *
     * ...
     * </pre>
     */
    @Override
    protected void visitSoyFileNode(SoyFileNode node) {
      localVarExprs = new LocalVariableStack();
      pyCodeBuilder = new PyCodeBuilder();
      // Encode all source files in utf-8 to allow for special unicode characters in the generated
      // literals.
      pyCodeBuilder.appendLine("# coding=utf-8");
      pyCodeBuilder.appendLine("\"\"\" This file was automatically generated by the Soy compiler.");
      pyCodeBuilder.appendLine("Please don't edit this file by hand.");
      // This "source" comment makes Code Search link the gencode to the Soy source:
      pyCodeBuilder.appendLine("source: ", node.getFilePath().path());
      pyCodeBuilder.appendLine();
      // NOTE(review): getNamespace() is used unconditionally here but null-checked a few lines
      // below; if it can be null, this line would print "null" — confirm intended behavior.
      pyCodeBuilder.appendLine("SOY_NAMESPACE: '" + node.getNamespace() + "'.");
      // Output a section containing optionally-parsed compiler directives in comments.
      pyCodeBuilder.appendLine();
      if (node.getNamespace() != null) {
        pyCodeBuilder.appendLine("Templates in namespace ", node.getNamespace(), ".");
      }
      pyCodeBuilder.appendLine("\"\"\"");
      // Add code to define Python namespaces and add import calls for libraries.
      pyCodeBuilder.appendLine();
      addCodeToRequireGeneralDeps();
      addCodeToRequireSoyNamespaces(node);
      addCodeToFixUnicodeStrings();
      // 'import pdb' is only needed when the file contains a {debugger} command.
      if (SoyTreeUtils.hasNodesOfType(node, DebuggerNode.class)) {
        pyCodeBuilder.appendLine("import pdb");
      }
      genPyExprsVisitor = genPyExprsVisitorFactory.create(localVarExprs, errorReporter);
      localVarExprs.pushFrame();
      node.getImports().forEach(this::visit);
      // Add code for each constant.
      for (ConstNode constNode : node.getConstants()) {
        pyCodeBuilder.appendLine().appendLine();
        visit(constNode);
      }
      // Add code for each template.
      for (TemplateNode template : node.getTemplates()) {
        pyCodeBuilder.appendLine().appendLine();
        visit(template);
      }
      localVarExprs.popFrame();
      // Capture the finished file content and release the builder for the next file.
      pyFilesContents.add(pyCodeBuilder.getCode());
      pyCodeBuilder = null;
    }
@Override
protected void visitImportNode(ImportNode node) {
node.visitVars(
(var) -> {
if (var.getSymbolKind() == SymbolKind.CONST) {
// This is a constant import.
String fullNamespace = fileSetMetadata.getNamespaceForPath(node.getSourceFilePath());
NamespaceAndName namespaceAndName = NamespaceAndName.fromModule(fullNamespace);
localVarExprs.addVariable(
var.name(),
new PyExpr(
namespaceAndName.name() + "." + var.getSymbol() + "()", Integer.MAX_VALUE));
}
});
}
@Override
protected void visitConstNode(ConstNode node) {
String functionName = GenPyCallExprVisitor.getLocalConstName(node);
pyCodeBuilder.appendLine("def ", functionName, "():");
pyCodeBuilder.increaseIndent();
TranslateToPyExprVisitor translator =
new TranslateToPyExprVisitor(localVarExprs, pluginValueFactory, node, errorReporter);
PyExpr value = translator.exec(node.getExpr());
pyCodeBuilder.appendLine("return ", value.getText());
localVarExprs.addVariable(
node.getVar().name(), new PyExpr(functionName + "()", Integer.MAX_VALUE));
// Dedent to end the function.
pyCodeBuilder.decreaseIndent();
}
    /**
     * Visit a TemplateNode and generate a corresponding function.
     *
     * <p>Example:
     *
     * <pre>
     * def myfunc(data, ijData):
     *   output = ''
     *   ...
     *   ...
     *   return output
     * </pre>
     */
    @Override
    protected void visitTemplateNode(TemplateNode node) {
      // Reject legacy_object_map in declared param types before generating any code.
      for (TemplateParam param : node.getParams()) {
        TypeNode type = param.getTypeNode();
        // Skip this if it's a param with a default value and an inferred type. We don't have to
        // worry about a legacy_object_map sneaking in through an inferred type because there is
        // no legacy_object_map literal syntax: http://b/79368576
        if (type != null) {
          new LegacyObjectMapFinder(errorReporter).exec(type);
        }
      }
      // Generate function definition up to colon.
      pyCodeBuilder.appendLine(
          "def ",
          GenPyCallExprVisitor.getLocalTemplateName(node),
          // These defaults are safe because soy only ever reads from these parameters. If that
          // changes, bad things could happen.
          "(data={}, ijData={}):");
      pyCodeBuilder.increaseIndent();
      generateFunctionBody(node);
      // Dedent to end the function.
      pyCodeBuilder.decreaseIndent();
    }
    /**
     * Reports an error for delegate templates: deltemplates are not supported by the Python
     * backend, so no code is generated for them.
     */
    @Override
    protected void visitTemplateDelegateNode(TemplateDelegateNode node) {
      errorReporter.report(node.getSourceLocation(), DELEGATE_TEMPLATES_UNSUPPORTED);
    }
    /**
     * Visit a basic template and generate its function via {@link #visitTemplateNode}.
     * Modifiable templates (and templates with a {@code modifies} expression) are not supported
     * by the Python backend; an error is reported for them before generation proceeds.
     */
    @Override
    protected void visitTemplateBasicNode(TemplateBasicNode node) {
      if (node.isModifiable() || node.getModifiesExpr() != null) {
        errorReporter.report(node.getSourceLocation(), MODIFIABLE_TEMPLATES_UNSUPPORTED);
      }
      visitTemplateNode(node);
    }
@Override
protected void visitPrintNode(PrintNode node) {
pyCodeBuilder.addToOutputVar(genPyExprsVisitor.exec(node));
}
@Override
protected void visitEvalNode(EvalNode node) {
TranslateToPyExprVisitor translator =
new TranslateToPyExprVisitor(localVarExprs, pluginValueFactory, node, errorReporter);
pyCodeBuilder.appendLine(translator.exec(node.getExpr()).getText());
}
/**
* Visit an IfNode and generate a full conditional statement, or an inline ternary conditional
* expression if all the children are computable as expressions.
*
* <p>Example:
*
* <pre>
* {if $boo > 0}
* ...
* {/if}
* </pre>
*
* might generate
*
* <pre>
* if data.get('boo') > 0:
* ...
* </pre>
*/
@Override
protected void visitIfNode(IfNode node) {
if (isComputableAsPyExprVisitor.exec(node)) {
pyCodeBuilder.addToOutputVar(genPyExprsVisitor.exec(node));
return;
}
// Not computable as Python expressions, so generate full code.
TranslateToPyExprVisitor translator =
new TranslateToPyExprVisitor(localVarExprs, pluginValueFactory, node, errorReporter);
for (SoyNode child : node.getChildren()) {
if (child instanceof IfCondNode) {
IfCondNode icn = (IfCondNode) child;
PyExpr condPyExpr = translator.exec(icn.getExpr());
if (icn.getCommandName().equals("if")) {
pyCodeBuilder.appendLine("if ", condPyExpr.getText(), ":");
} else {
pyCodeBuilder.appendLine("elif ", condPyExpr.getText(), ":");
}
pyCodeBuilder.increaseIndent();
visitChildren(icn);
pyCodeBuilder.decreaseIndent();
} else if (child instanceof IfElseNode) {
pyCodeBuilder.appendLine("else:");
pyCodeBuilder.increaseIndent();
visitChildren((IfElseNode) child);
pyCodeBuilder.decreaseIndent();
} else {
throw new AssertionError("Unexpected if child node type. Child: " + child);
}
}
}
    /**
     * Python does not support switch statements, so just replace with if: ... elif: ... else: ...
     * As some expressions may generate different results each time, the expression is stored before
     * conditionals (which prevents expression inlining).
     *
     * <p>Example:
     *
     * <pre>
     * {switch $boo}
     *   {case 0}
     *     ...
     *   {case 1, 2}
     *     ...
     *   {default}
     *     ...
     * {/switch}
     * </pre>
     *
     * might generate
     *
     * <pre>
     * switchValue = data.get('boo')
     * if switchValue == 0:
     *   ...
     * elif switchValue == 1:
     *   ...
     * elif switchValue == 2:
     *   ...
     * else:
     *   ...
     * </pre>
     */
    @Override
    protected void visitSwitchNode(SwitchNode node) {
      // Run the switch value creation first to ensure side effects always occur.
      TranslateToPyExprVisitor translator =
          new TranslateToPyExprVisitor(localVarExprs, pluginValueFactory, node, errorReporter);
      String switchValueVarName = "switchValue";
      PyExpr switchValuePyExpr = translator.exec(node.getExpr());
      pyCodeBuilder.appendLine(switchValueVarName, " = ", switchValuePyExpr.getText());
      // If a Switch with only a default is provided (no case statements), just execute the inner
      // code directly.
      if (node.numChildren() == 1 && node.getChild(0) instanceof SwitchDefaultNode) {
        visitChildren(node.getChild(0));
        return;
      }
      // Tracks whether the next case starts with 'if' (first branch) or 'elif' (subsequent ones).
      boolean isFirstCase = true;
      for (SoyNode child : node.getChildren()) {
        if (child instanceof SwitchCaseNode) {
          SwitchCaseNode scn = (SwitchCaseNode) child;
          // Each expression of a multi-expression case gets its own 'if'/'elif' branch.
          for (ExprNode caseExpr : scn.getExprList()) {
            PyExpr casePyExpr = translator.exec(caseExpr);
            // Equality is delegated to the runtime for Soy's type-safe comparison semantics.
            PyExpr conditionFn =
                new PyFunctionExprBuilder("runtime.type_safe_eq")
                    .addArg(new PyExpr(switchValueVarName, Integer.MAX_VALUE))
                    .addArg(casePyExpr)
                    .asPyExpr();
            if (isFirstCase) {
              pyCodeBuilder.appendLineStart("if ").append(conditionFn.getText()).appendLineEnd(":");
              isFirstCase = false;
            } else {
              pyCodeBuilder
                  .appendLineStart("elif ")
                  .append(conditionFn.getText())
                  .appendLineEnd(":");
            }
            pyCodeBuilder.increaseIndent();
            visitChildren(scn);
            pyCodeBuilder.decreaseIndent();
          }
        } else if (child instanceof SwitchDefaultNode) {
          SwitchDefaultNode sdn = (SwitchDefaultNode) child;
          pyCodeBuilder.appendLine("else:");
          pyCodeBuilder.increaseIndent();
          visitChildren(sdn);
          pyCodeBuilder.decreaseIndent();
        } else {
          throw new AssertionError("Unexpected switch child node type. Child: " + child);
        }
      }
    }
/** Generates code for a for loop. */
@Override
protected void visitForNode(ForNode node) {
ForNonemptyNode nonEmptyNode = (ForNonemptyNode) node.getChild(0);
String baseVarName = nonEmptyNode.getVarName();
String listVarName = String.format("%sList%d", baseVarName, node.getId());
// Define list variable
TranslateToPyExprVisitor translator =
new TranslateToPyExprVisitor(localVarExprs, pluginValueFactory, node, errorReporter);
PyExpr dataRefPyExpr = translator.exec(node.getExpr());
pyCodeBuilder.appendLine(listVarName, " = ", dataRefPyExpr.getText());
// Generate code for nonempty case.
visit(nonEmptyNode);
}
/**
* The ForNonemptyNode performs the actual looping. We use a standard {@code for} loop, except
* that instead of looping directly over the list, we loop over an enumeration to have easy
* access to the index along with the data.
*
* <p>Example:
*
* <pre>
* {for $foo in $boo}
* ...
* {/for}
* </pre>
*
* might generate
*
* <pre>
* fooList2 = data.get('boo')
* for fooIndex2, fooData2 in enumerate(fooList2):
* ...
* </pre>
*/
@Override
protected void visitForNonemptyNode(ForNonemptyNode node) {
// Build the local variable names.
String baseVarName = node.getVarName();
String forNodeId = Integer.toString(node.getForNodeId());
String listVarName = baseVarName + "List" + forNodeId;
String indexVarName = baseVarName + "Index" + forNodeId;
String dataVarName = baseVarName + "Data" + forNodeId;
// Create the loop with an enumeration.
pyCodeBuilder.appendLine(
"for ", indexVarName, ", ", dataVarName, " in enumerate(", listVarName, "):");
pyCodeBuilder.increaseIndent();
// Add a new localVarExprs frame and populate it with the translations from this loop.
localVarExprs.pushFrame();
localVarExprs.addVariable(baseVarName, new PyExpr(dataVarName, Integer.MAX_VALUE));
if (node.getIndexVar() != null) {
localVarExprs.addVariable(
node.getIndexVarName(), new PyExpr(indexVarName, Integer.MAX_VALUE));
}
// Generate the code for the loop body.
visitChildren(node);
// Remove the localVarExprs frame that we added above.
localVarExprs.popFrame();
// The end of the Python 'for' loop.
pyCodeBuilder.decreaseIndent();
}
/**
* Visits a let node which accepts a value and stores it as a unique variable. The unique
* variable name is stored in the LocalVariableStack for use by any subsequent code.
*
* <p>Example:
*
* <pre>
* {let $boo: $foo[$moo] /}
* </pre>
*
* might generate
*
* <pre>
* boo3 = data.get('foo')['moo']
* </pre>
*/
@Override
protected void visitLetValueNode(LetValueNode node) {
String generatedVarName = node.getUniqueVarName();
// Generate code to define the local var.
TranslateToPyExprVisitor translator =
new TranslateToPyExprVisitor(localVarExprs, pluginValueFactory, node, errorReporter);
PyExpr valuePyExpr = translator.exec(node.getExpr());
pyCodeBuilder.appendLine(generatedVarName, " = ", valuePyExpr.getText());
// Add a mapping for generating future references to this local var.
localVarExprs.addVariable(node.getVarName(), new PyExpr(generatedVarName, Integer.MAX_VALUE));
}
    /**
     * Visits a let node which contains a content section and stores it as a unique variable. The
     * unique variable name is stored in the LocalVariableStack for use by any subsequent code.
     *
     * <p>Note, this is one of the location where Strict mode is enforced in Python templates. As
     * such, all LetContentNodes must have a contentKind specified.
     *
     * <p>Example:
     *
     * <pre>
     * {let $boo kind="html"}
     *   Hello {$name}
     * {/let}
     * </pre>
     *
     * might generate
     *
     * <pre>
     * boo3 = sanitize.SanitizedHtml(''.join(['Hello ', sanitize.escape_html(data.get('name'))])
     * </pre>
     */
    @Override
    protected void visitLetContentNode(LetContentNode node) {
      String generatedVarName = node.getUniqueVarName();
      // Traverse the children and push them onto the generated variable.
      // The push/pop pairs below must stay balanced across the visitChildren() call.
      localVarExprs.pushFrame();
      pyCodeBuilder.pushOutputVar(generatedVarName);
      visitChildren(node);
      PyExpr generatedContent = pyCodeBuilder.getOutputAsString();
      pyCodeBuilder.popOutputVar();
      localVarExprs.popFrame();
      // Mark the result as being escaped to the appropriate kind (e.g., "sanitize.SanitizedHtml").
      pyCodeBuilder.appendLine(
          generatedVarName,
          " = ",
          InternalPyExprUtils.wrapAsSanitizedContent(node.getContentKind(), generatedContent)
              .getText());
      // Add a mapping for generating future references to this local var.
      localVarExprs.addVariable(node.getVarName(), new PyExpr(generatedVarName, Integer.MAX_VALUE));
    }
    /** Reports an error for delegate calls: deltemplates are not supported in the Python backend. */
    @Override
    protected void visitCallDelegateNode(CallDelegateNode node) {
      errorReporter.report(node.getSourceLocation(), DELEGATE_TEMPLATES_UNSUPPORTED);
    }
/**
 * Visits a call node and generates the syntax needed to call another template. If every child
 * can be represented as an expression, the call is built as a single expression; otherwise the
 * non-expression params are first emitted as {@code param<n>} variables before the call.
 */
@Override
protected void visitCallNode(CallNode node) {
  // Emit 'param<n>' variable definitions for any content params that cannot be expressed as
  // a single Python expression; the call expression below will reference those variables.
  for (CallParamNode param : node.getChildren()) {
    boolean needsOwnVariable =
        param instanceof CallParamContentNode && !isComputableAsPyExprVisitor.exec(param);
    if (needsOwnVariable) {
      visit(param);
    }
  }

  // The call itself is always representable as an expression; append it to the output var.
  pyCodeBuilder.addToOutputVar(
      genPyCallExprVisitor.exec(node, localVarExprs, errorReporter).toPyString());
}
/**
 * Visits a call param content node which isn't computable as a PyExpr, rendering its content
 * into a variable named {@code param<n>} where n is the node's id.
 */
@Override
protected void visitCallParamContentNode(CallParamContentNode node) {
  // Expression-computable params are handled inline by the call visitor, never here.
  Preconditions.checkArgument(
      !isComputableAsPyExprVisitor.exec(node),
      "Should only define 'param<n>' when not computable as Python expressions.");

  String paramVarName = "param" + node.getId();
  pyCodeBuilder.pushOutputVar(paramVarName);
  pyCodeBuilder.initOutputVarIfNecessary();
  visitChildren(node);
  pyCodeBuilder.popOutputVar();
}
/**
 * Visits a {@code velog} node. The Python backend has no logger support, so when a logonly
 * expression is present this emits code that raises at render time whenever that expression
 * evaluates to true; the node's children are then rendered as ordinary content.
 */
@Override
protected void visitVeLogNode(VeLogNode node) {
  if (node.getLogonlyExpression() != null) {
    TranslateToPyExprVisitor translator =
        new TranslateToPyExprVisitor(localVarExprs, pluginValueFactory, node, errorReporter);
    PyExpr isLogonly = translator.exec(node.getLogonlyExpression());
    // Runtime guard: logonly="true" cannot be honored without a configured logger.
    pyCodeBuilder.appendLine("if ", isLogonly.getText(), ":");
    pyCodeBuilder.increaseIndent();
    pyCodeBuilder.appendLine(
        "raise Exception('Cannot set logonly=\"true\" unless there is a "
            + "logger configured, but pysrc doesn\\'t support loggers')");
    pyCodeBuilder.decreaseIndent();
  }
  // TODO(lukes): expand implementation
  visitChildren(node);
}
/** Visits a {@code debugger} node by emitting a pdb breakpoint into the generated Python. */
@Override
protected void visitDebuggerNode(DebuggerNode node) {
  pyCodeBuilder.appendLine("pdb.set_trace()");
}
/** Visits a {@code key} node. */
@Override
protected void visitKeyNode(KeyNode node) {
  // Do nothing. Outside of incremental dom, key nodes are a no-op.
}
/**
 * Visits a {@code log} node, rendering its children into a temporary variable and emitting a
 * Python print statement for that variable.
 */
@Override
protected void visitLogNode(LogNode node) {
  String logVarName = "logger_" + node.getId();

  // Collect the rendered children into the temporary variable.
  pyCodeBuilder.pushOutputVar(logVarName);
  pyCodeBuilder.initOutputVarIfNecessary();
  visitChildren(node);
  pyCodeBuilder.popOutputVar();

  pyCodeBuilder.appendLine("print(", logVarName, ")");
}
// ---------------------------------------------------------------------------------------------
// Fallback implementation.

/**
 * Fallback for node types without a dedicated visit method: nodes computable as Python
 * expressions are appended to the current output variable; anything else is a coding error.
 */
@Override
protected void visitSoyNode(SoyNode node) {
  if (!isComputableAsPyExprVisitor.exec(node)) {
    // A dedicated visit*Node() implementation is required for this node type.
    throw new UnsupportedOperationException();
  }
  pyCodeBuilder.addToOutputVar(genPyExprsVisitor.exec(node));
}
// ---------------------------------------------------------------------------------------------
// Utility methods.

/** Helper for visitSoyFileNode(SoyFileNode) to add code to require general dependencies. */
private void addCodeToRequireGeneralDeps() {
  pyCodeBuilder.appendLine("from __future__ import unicode_literals");
  // In python 2, division always returns integers; opting into python 3 semantics keeps this
  // backend aligned with the other backends.
  pyCodeBuilder.appendLine("from __future__ import division");

  // Standard-library modules the generated code relies on.
  for (String stdlibModule : new String[] {"collections", "math", "numbers", "random", "sys"}) {
    pyCodeBuilder.appendLine("import ", stdlibModule);
  }

  // Soy runtime modules. TODO(dcphillips): limit this based on usage?
  String runtimePath = pySrcOptions.getRuntimePath();
  for (String runtimeModule : new String[] {"bidi", "directives", "runtime", "sanitize"}) {
    pyCodeBuilder.appendLine("from ", runtimePath, " import ", runtimeModule);
  }
  pyCodeBuilder.appendLine();

  // Import the configured bidi-is-RTL function under a fixed alias to avoid name conflicts.
  String bidiIsRtlFn = pySrcOptions.getBidiIsRtlFn();
  if (!bidiIsRtlFn.isEmpty()) {
    String bidiModulePath = bidiIsRtlFn.substring(0, bidiIsRtlFn.lastIndexOf('.'));
    NamespaceAndName bidiFn = NamespaceAndName.fromModule(bidiModulePath);
    pyCodeBuilder.appendLine(
        "from ",
        bidiFn.namespace(),
        " import ",
        bidiFn.name(),
        " as ",
        SoyBidiUtils.IS_RTL_MODULE_ALIAS);
  }

  // Add import and instantiate statements for the translator module.
  // TODO(user): remember the check when implementing MsgNode
  if (!pySrcOptions.getTranslationClass().isEmpty()) {
    NamespaceAndName translation =
        NamespaceAndName.fromModule(pySrcOptions.getTranslationClass());
    pyCodeBuilder.appendLine("from ", translation.namespace(), " import ", translation.name());
    pyCodeBuilder.appendLine(PyExprUtils.TRANSLATOR_NAME, " = ", translation.name(), "()");
  }
}
/**
 * Helper for visitSoyFileNode(SoyFileNode) to add code to require Soy namespaces.
 *
 * @param soyFile The node we're visiting.
 */
private void addCodeToRequireSoyNamespaces(SoyFileNode soyFile) {
  // Collect the namespaces of all templates this file imports, sorted (TreeSet) so the
  // generated import order is deterministic.
  SortedSet<String> calleeModules =
      soyFile.getImports().stream()
          .filter(i -> i.getImportType() == ImportType.TEMPLATE)
          .map(i -> fileSetMetadata.getNamespaceForPath(i.getSourceFilePath()))
          .collect(toCollection(TreeSet::new));
  for (String calleeModule : calleeModules) {
    NamespaceAndName namespaceAndName = NamespaceAndName.fromModule(calleeModule);
    if (namespaceManifest.containsKey(calleeModule)) {
      // The manifest maps this Soy namespace to a concrete Python module path; import that
      // module directly under the short alias.
      pyCodeBuilder.appendLine(
          "import ", namespaceManifest.get(calleeModule), " as ", namespaceAndName.name());
    } else {
      // No manifest entry: fall back to a runtime lookup by namespace, optionally passing
      // the environment module path. Built with appendLineStart/append/appendLineEnd because
      // the argument list is conditional.
      pyCodeBuilder.appendLineStart(
          namespaceAndName.name(),
          " = runtime.namespaced_import('",
          namespaceAndName.name(),
          "', namespace='",
          namespaceAndName.namespace(),
          "'");
      if (!pySrcOptions.getEnvironmentModulePath().isEmpty()) {
        pyCodeBuilder
            .append(", environment_path='")
            .append(pySrcOptions.getEnvironmentModulePath(), "'");
      }
      pyCodeBuilder.appendLineEnd(")");
    }
  }
  // Store the entire manifest for use at runtime.
  pyCodeBuilder.appendLine("NAMESPACE_MANIFEST = {");
  pyCodeBuilder.increaseIndentTwice();
  for (Map.Entry<String, String> entry : namespaceManifest.entrySet()) {
    pyCodeBuilder.appendLine("'", entry.getKey(), "': '", entry.getValue(), "',");
  }
  pyCodeBuilder.decreaseIndentTwice();
  pyCodeBuilder.appendLine("}");
  pyCodeBuilder.appendLine();
}
/**
 * Helper for visitSoyFileNode(SoyFileNode) to add code to turn byte strings into unicode
 * strings for Python 2.
 */
private void addCodeToFixUnicodeStrings() {
  // Emits:
  //   try:
  //     str = unicode
  //   except NameError:
  //     pass
  // Under Python 2 this aliases str to unicode; under Python 3 'unicode' is undefined, the
  // NameError is swallowed, and str is left untouched.
  pyCodeBuilder.appendLine("try:");
  pyCodeBuilder.increaseIndent();
  pyCodeBuilder.appendLine("str = unicode");
  pyCodeBuilder.decreaseIndent();
  pyCodeBuilder.appendLine("except NameError:");
  pyCodeBuilder.increaseIndent();
  pyCodeBuilder.appendLine("pass");
  pyCodeBuilder.decreaseIndent();
  pyCodeBuilder.appendLine();
}
/** Helper for visitTemplateNode which generates the function body. */
private void generateFunctionBody(TemplateNode node) {
  // Render the template's children into the "output" variable inside a fresh local frame.
  localVarExprs.pushFrame();
  pyCodeBuilder.pushOutputVar("output");
  visitChildren(node);
  PyExpr renderedBody = pyCodeBuilder.getOutputAsString();
  pyCodeBuilder.popOutputVar();

  // Templates with autoescape="strict" return the SanitizedContent wrapper for their kind:
  // - Call sites are wrapped in an escaper; returning SanitizedContent prevents re-escaping.
  // - The topmost call into Soy returns a SanitizedContent, which makes it easy to feed one
  //   template's result to another and to confidently assign sanitized HTML to innerHTML.
  //   This does not use the internal-blocks variant.
  PyExpr sanitizedResult =
      InternalPyExprUtils.wrapAsSanitizedContent(node.getContentKind(), renderedBody);
  pyCodeBuilder.appendLine("return ", sanitizedResult.getText());

  localVarExprs.popFrame();
}
}
/** A dotted Python module path split into its package portion ("namespace") and final name. */
@AutoValue
abstract static class NamespaceAndName {
  static NamespaceAndName fromModule(String moduleName) {
    int lastDotIndex = moduleName.lastIndexOf('.');
    if (lastDotIndex == -1) {
      // No dots: the module serves as both its own namespace and its own name.
      return new AutoValue_GenPyCodeVisitor_NamespaceAndName(moduleName, moduleName);
    }
    return new AutoValue_GenPyCodeVisitor_NamespaceAndName(
        moduleName.substring(0, lastDotIndex), moduleName.substring(lastDotIndex + 1));
  }

  abstract String namespace();

  abstract String name();
}
}
|
googleapis/google-api-java-client-services | 36,009 | clients/google-api-services-walletobjects/v1/2.0.0/com/google/api/services/walletobjects/model/FlightObject.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.walletobjects.model;
/**
* Model definition for FlightObject.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Wallet API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class FlightObject extends com.google.api.client.json.GenericJson {
  // --- JSON-mapped fields: each @Key-annotated field below is (de)serialized by the
  // GenericJson machinery this class extends. Code is generated; javadoc kept verbatim. ---

  /**
   * Optional app or website link that will be displayed as a button on the front of the pass. If
   * AppLinkData is provided for the corresponding class only object AppLinkData will be displayed.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private AppLinkData appLinkData;

  /**
   * The barcode type and value.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Barcode barcode;

  /**
   * Passenger specific information about boarding and seating.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private BoardingAndSeatingInfo boardingAndSeatingInfo;

  /**
   * Required. The class associated with this object. The class must be of the same type as this
   * object, must already exist, and must be approved. Class IDs should follow the format issuer
   * ID.identifier where the former is issued by Google and latter is chosen by you.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String classId;

  /**
   * A copy of the inherited fields of the parent class. These fields are retrieved during a GET.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private FlightClass classReference;

  /**
   * Indicates if notifications should explicitly be suppressed. If this field is set to true,
   * regardless of the `messages` field, expiration notifications to the user will be suppressed. By
   * default, this field is set to false. Currently, this can only be set for Flights.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean disableExpirationNotification;

  /**
   * Information that controls how passes are grouped together.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GroupingInfo groupingInfo;

  /**
   * Whether this object is currently linked to a single device. This field is set by the platform
   * when a user saves the object, linking it to their device. Intended for use by select partners.
   * Contact support for additional information.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean hasLinkedDevice;

  /**
   * Indicates if the object has users. This field is set by the platform.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean hasUsers;

  /**
   * Optional banner image displayed on the front of the card. If none is present, hero image of the
   * class, if present, will be displayed. If hero image of the class is also not present, nothing
   * will be displayed.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Image heroImage;

  /**
   * The background color for the card. If not set the dominant color of the hero image is used, and
   * if no hero image is set, the dominant color of the logo is used. The format is #rrggbb where
   * rrggbb is a hex RGB triplet, such as `#ffcc00`. You can also use the shorthand version of the
   * RGB triplet which is #rgb, such as `#fc0`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String hexBackgroundColor;

  /**
   * Required. The unique identifier for an object. This ID must be unique across all objects from
   * an issuer. This value should follow the format issuer ID.identifier where the former is issued
   * by Google and latter is chosen by you. The unique identifier should only include alphanumeric
   * characters, '.', '_', or '-'.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;

  /**
   * Image module data. The maximum number of these fields displayed is 1 from object level and 1
   * for class object level.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ImageModuleData> imageModulesData;

  /**
   * Deprecated. Use textModulesData instead.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private InfoModuleData infoModuleData;

  /**
   * Identifies what kind of resource this is. Value: the fixed string
   * `"walletobjects#flightObject"`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * linked_object_ids are a list of other objects such as event ticket, loyalty, offer, generic,
   * giftcard, transit and boarding pass that should be automatically attached to this flight
   * object. If a user had saved this boarding pass, then these linked_object_ids would be
   * automatically pushed to the user's wallet (unless they turned off the setting to receive such
   * linked passes). Make sure that objects present in linked_object_ids are already inserted - if
   * not, calls would fail. Once linked, the linked objects cannot be unlinked. You cannot link
   * objects belonging to another issuer. There is a limit to the number of objects that can be
   * linked to a single object. After the limit is reached, new linked objects in the call will be
   * ignored silently. Object IDs should follow the format issuer ID. identifier where the former is
   * issued by Google and the latter is chosen by you.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> linkedObjectIds;

  /**
   * Links module data. If links module data is also defined on the class, both will be displayed.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LinksModuleData linksModuleData;

  /**
   * Note: This field is currently not supported to trigger geo notifications.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<LatLongPoint> locations;

  /**
   * Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations
   * added beyond the 10 will be rejected. These locations will trigger a notification when a user
   * enters within a Google-set radius of the point. This field replaces the deprecated
   * LatLongPoints.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<MerchantLocation> merchantLocations;

  /**
   * An array of messages displayed in the app. All users of this object will receive its associated
   * messages. The maximum number of these fields is 10.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Message> messages;

  /**
   * Whether or not field updates to this object should trigger notifications. When set to NOTIFY,
   * we will attempt to trigger a field update notification to users. These notifications will only
   * be sent to users if the field is part of an allowlist. If set to DO_NOT_NOTIFY or
   * NOTIFICATION_SETTINGS_UNSPECIFIED, no notification will be triggered. This setting is ephemeral
   * and needs to be set with each PATCH or UPDATE request, otherwise a notification will not be
   * triggered.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String notifyPreference;

  /**
   * Pass constraints for the object. Includes limiting NFC and screenshot behaviors.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private PassConstraints passConstraints;

  /**
   * Required. Passenger name as it would appear on the boarding pass. eg: "Dave M Gahan" or
   * "Gahan/Dave" or "GAHAN/DAVEM"
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String passengerName;

  /**
   * Required. Information about flight reservation.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ReservationInfo reservationInfo;

  /**
   * The rotating barcode type and value.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private RotatingBarcode rotatingBarcode;

  /**
   * Restrictions on the object that needs to be verified before the user tries to save the pass.
   * Note that this restrictions will only be applied during save time. If the restrictions changed
   * after a user saves the pass, the new restrictions will not be applied to an already saved pass.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private SaveRestrictions saveRestrictions;

  /**
   * An image for the security program that applies to the passenger.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Image securityProgramLogo;

  /**
   * The value that will be transmitted to a Smart Tap certified terminal over NFC for this object.
   * The class level fields `enableSmartTap` and `redemptionIssuers` must also be set up correctly
   * in order for the pass to support Smart Tap. Only ASCII characters are supported.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String smartTapRedemptionValue;

  /**
   * Required. The state of the object. This field is used to determine how an object is displayed
   * in the app. For example, an `inactive` object is moved to the "Expired passes" section.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String state;

  /**
   * Text module data. If text module data is also defined on the class, both will be displayed. The
   * maximum number of these fields displayed is 10 from the object and 10 from the class.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<TextModuleData> textModulesData;

  /**
   * The time period this object will be `active` and object can be used. An object's state will be
   * changed to `expired` when this time period has passed.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private TimeInterval validTimeInterval;

  /**
   * Optional value added module data. Maximum of ten on the object.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ValueAddedModuleData> valueAddedModuleData;

  /**
   * Deprecated
   * The value may be {@code null}.
   */
  // NOTE(review): @JsonString presumably serializes this long as a JSON string to avoid
  // precision loss in JSON-number parsers — confirm against google-http-client docs.
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long version;
  // --- Accessors (generated). All setters return {@code this} to permit fluent chaining. ---

  /**
   * Optional app or website link that will be displayed as a button on the front of the pass. If
   * AppLinkData is provided for the corresponding class only object AppLinkData will be displayed.
   * @return value or {@code null} for none
   */
  public AppLinkData getAppLinkData() {
    return appLinkData;
  }

  /**
   * Optional app or website link that will be displayed as a button on the front of the pass. If
   * AppLinkData is provided for the corresponding class only object AppLinkData will be displayed.
   * @param appLinkData appLinkData or {@code null} for none
   */
  public FlightObject setAppLinkData(AppLinkData appLinkData) {
    this.appLinkData = appLinkData;
    return this;
  }

  /**
   * The barcode type and value.
   * @return value or {@code null} for none
   */
  public Barcode getBarcode() {
    return barcode;
  }

  /**
   * The barcode type and value.
   * @param barcode barcode or {@code null} for none
   */
  public FlightObject setBarcode(Barcode barcode) {
    this.barcode = barcode;
    return this;
  }

  /**
   * Passenger specific information about boarding and seating.
   * @return value or {@code null} for none
   */
  public BoardingAndSeatingInfo getBoardingAndSeatingInfo() {
    return boardingAndSeatingInfo;
  }

  /**
   * Passenger specific information about boarding and seating.
   * @param boardingAndSeatingInfo boardingAndSeatingInfo or {@code null} for none
   */
  public FlightObject setBoardingAndSeatingInfo(BoardingAndSeatingInfo boardingAndSeatingInfo) {
    this.boardingAndSeatingInfo = boardingAndSeatingInfo;
    return this;
  }

  /**
   * Required. The class associated with this object. The class must be of the same type as this
   * object, must already exist, and must be approved. Class IDs should follow the format issuer
   * ID.identifier where the former is issued by Google and latter is chosen by you.
   * @return value or {@code null} for none
   */
  public java.lang.String getClassId() {
    return classId;
  }

  /**
   * Required. The class associated with this object. The class must be of the same type as this
   * object, must already exist, and must be approved. Class IDs should follow the format issuer
   * ID.identifier where the former is issued by Google and latter is chosen by you.
   * @param classId classId or {@code null} for none
   */
  public FlightObject setClassId(java.lang.String classId) {
    this.classId = classId;
    return this;
  }

  /**
   * A copy of the inherited fields of the parent class. These fields are retrieved during a GET.
   * @return value or {@code null} for none
   */
  public FlightClass getClassReference() {
    return classReference;
  }

  /**
   * A copy of the inherited fields of the parent class. These fields are retrieved during a GET.
   * @param classReference classReference or {@code null} for none
   */
  public FlightObject setClassReference(FlightClass classReference) {
    this.classReference = classReference;
    return this;
  }

  /**
   * Indicates if notifications should explicitly be suppressed. If this field is set to true,
   * regardless of the `messages` field, expiration notifications to the user will be suppressed. By
   * default, this field is set to false. Currently, this can only be set for Flights.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getDisableExpirationNotification() {
    return disableExpirationNotification;
  }

  /**
   * Indicates if notifications should explicitly be suppressed. If this field is set to true,
   * regardless of the `messages` field, expiration notifications to the user will be suppressed. By
   * default, this field is set to false. Currently, this can only be set for Flights.
   * @param disableExpirationNotification disableExpirationNotification or {@code null} for none
   */
  public FlightObject setDisableExpirationNotification(java.lang.Boolean disableExpirationNotification) {
    this.disableExpirationNotification = disableExpirationNotification;
    return this;
  }

  /**
   * Information that controls how passes are grouped together.
   * @return value or {@code null} for none
   */
  public GroupingInfo getGroupingInfo() {
    return groupingInfo;
  }

  /**
   * Information that controls how passes are grouped together.
   * @param groupingInfo groupingInfo or {@code null} for none
   */
  public FlightObject setGroupingInfo(GroupingInfo groupingInfo) {
    this.groupingInfo = groupingInfo;
    return this;
  }

  /**
   * Whether this object is currently linked to a single device. This field is set by the platform
   * when a user saves the object, linking it to their device. Intended for use by select partners.
   * Contact support for additional information.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getHasLinkedDevice() {
    return hasLinkedDevice;
  }

  /**
   * Whether this object is currently linked to a single device. This field is set by the platform
   * when a user saves the object, linking it to their device. Intended for use by select partners.
   * Contact support for additional information.
   * @param hasLinkedDevice hasLinkedDevice or {@code null} for none
   */
  public FlightObject setHasLinkedDevice(java.lang.Boolean hasLinkedDevice) {
    this.hasLinkedDevice = hasLinkedDevice;
    return this;
  }

  /**
   * Indicates if the object has users. This field is set by the platform.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getHasUsers() {
    return hasUsers;
  }

  /**
   * Indicates if the object has users. This field is set by the platform.
   * @param hasUsers hasUsers or {@code null} for none
   */
  public FlightObject setHasUsers(java.lang.Boolean hasUsers) {
    this.hasUsers = hasUsers;
    return this;
  }
  /**
   * Optional banner image displayed on the front of the card. If none is present, hero image of the
   * class, if present, will be displayed. If hero image of the class is also not present, nothing
   * will be displayed.
   * @return value or {@code null} for none
   */
  public Image getHeroImage() {
    return heroImage;
  }

  /**
   * Optional banner image displayed on the front of the card. If none is present, hero image of the
   * class, if present, will be displayed. If hero image of the class is also not present, nothing
   * will be displayed.
   * @param heroImage heroImage or {@code null} for none
   */
  public FlightObject setHeroImage(Image heroImage) {
    this.heroImage = heroImage;
    return this;
  }

  /**
   * The background color for the card. If not set the dominant color of the hero image is used, and
   * if no hero image is set, the dominant color of the logo is used. The format is #rrggbb where
   * rrggbb is a hex RGB triplet, such as `#ffcc00`. You can also use the shorthand version of the
   * RGB triplet which is #rgb, such as `#fc0`.
   * @return value or {@code null} for none
   */
  public java.lang.String getHexBackgroundColor() {
    return hexBackgroundColor;
  }

  /**
   * The background color for the card. If not set the dominant color of the hero image is used, and
   * if no hero image is set, the dominant color of the logo is used. The format is #rrggbb where
   * rrggbb is a hex RGB triplet, such as `#ffcc00`. You can also use the shorthand version of the
   * RGB triplet which is #rgb, such as `#fc0`.
   * @param hexBackgroundColor hexBackgroundColor or {@code null} for none
   */
  public FlightObject setHexBackgroundColor(java.lang.String hexBackgroundColor) {
    this.hexBackgroundColor = hexBackgroundColor;
    return this;
  }

  /**
   * Required. The unique identifier for an object. This ID must be unique across all objects from
   * an issuer. This value should follow the format issuer ID.identifier where the former is issued
   * by Google and latter is chosen by you. The unique identifier should only include alphanumeric
   * characters, '.', '_', or '-'.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }

  /**
   * Required. The unique identifier for an object. This ID must be unique across all objects from
   * an issuer. This value should follow the format issuer ID.identifier where the former is issued
   * by Google and latter is chosen by you. The unique identifier should only include alphanumeric
   * characters, '.', '_', or '-'.
   * @param id id or {@code null} for none
   */
  public FlightObject setId(java.lang.String id) {
    this.id = id;
    return this;
  }

  /**
   * Image module data. The maximum number of these fields displayed is 1 from object level and 1
   * for class object level.
   * @return value or {@code null} for none
   */
  public java.util.List<ImageModuleData> getImageModulesData() {
    return imageModulesData;
  }

  /**
   * Image module data. The maximum number of these fields displayed is 1 from object level and 1
   * for class object level.
   * @param imageModulesData imageModulesData or {@code null} for none
   */
  public FlightObject setImageModulesData(java.util.List<ImageModuleData> imageModulesData) {
    this.imageModulesData = imageModulesData;
    return this;
  }

  // Deprecated accessor pair: per the field's javadoc, prefer textModulesData.
  /**
   * Deprecated. Use textModulesData instead.
   * @return value or {@code null} for none
   */
  public InfoModuleData getInfoModuleData() {
    return infoModuleData;
  }

  /**
   * Deprecated. Use textModulesData instead.
   * @param infoModuleData infoModuleData or {@code null} for none
   */
  public FlightObject setInfoModuleData(InfoModuleData infoModuleData) {
    this.infoModuleData = infoModuleData;
    return this;
  }

  /**
   * Identifies what kind of resource this is. Value: the fixed string
   * `"walletobjects#flightObject"`.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * Identifies what kind of resource this is. Value: the fixed string
   * `"walletobjects#flightObject"`.
   * @param kind kind or {@code null} for none
   */
  public FlightObject setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }
  /**
   * linked_object_ids are a list of other objects such as event ticket, loyalty, offer, generic,
   * giftcard, transit and boarding pass that should be automatically attached to this flight
   * object. If a user had saved this boarding pass, then these linked_object_ids would be
   * automatically pushed to the user's wallet (unless they turned off the setting to receive such
   * linked passes). Make sure that objects present in linked_object_ids are already inserted - if
   * not, calls would fail. Once linked, the linked objects cannot be unlinked. You cannot link
   * objects belonging to another issuer. There is a limit to the number of objects that can be
   * linked to a single object. After the limit is reached, new linked objects in the call will be
   * ignored silently. Object IDs should follow the format issuer ID. identifier where the former is
   * issued by Google and the latter is chosen by you.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getLinkedObjectIds() {
    return linkedObjectIds;
  }

  /**
   * linked_object_ids are a list of other objects such as event ticket, loyalty, offer, generic,
   * giftcard, transit and boarding pass that should be automatically attached to this flight
   * object. If a user had saved this boarding pass, then these linked_object_ids would be
   * automatically pushed to the user's wallet (unless they turned off the setting to receive such
   * linked passes). Make sure that objects present in linked_object_ids are already inserted - if
   * not, calls would fail. Once linked, the linked objects cannot be unlinked. You cannot link
   * objects belonging to another issuer. There is a limit to the number of objects that can be
   * linked to a single object. After the limit is reached, new linked objects in the call will be
   * ignored silently. Object IDs should follow the format issuer ID. identifier where the former is
   * issued by Google and the latter is chosen by you.
   * @param linkedObjectIds linkedObjectIds or {@code null} for none
   */
  public FlightObject setLinkedObjectIds(java.util.List<java.lang.String> linkedObjectIds) {
    this.linkedObjectIds = linkedObjectIds;
    return this;
  }

  /**
   * Links module data. If links module data is also defined on the class, both will be displayed.
   * @return value or {@code null} for none
   */
  public LinksModuleData getLinksModuleData() {
    return linksModuleData;
  }

  /**
   * Links module data. If links module data is also defined on the class, both will be displayed.
   * @param linksModuleData linksModuleData or {@code null} for none
   */
  public FlightObject setLinksModuleData(LinksModuleData linksModuleData) {
    this.linksModuleData = linksModuleData;
    return this;
  }

  // Per the field javadoc, locations no longer triggers geo notifications; the
  // merchantLocations accessors below are the replacement.
  /**
   * Note: This field is currently not supported to trigger geo notifications.
   * @return value or {@code null} for none
   */
  public java.util.List<LatLongPoint> getLocations() {
    return locations;
  }

  /**
   * Note: This field is currently not supported to trigger geo notifications.
   * @param locations locations or {@code null} for none
   */
  public FlightObject setLocations(java.util.List<LatLongPoint> locations) {
    this.locations = locations;
    return this;
  }

  /**
   * Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations
   * added beyond the 10 will be rejected. These locations will trigger a notification when a user
   * enters within a Google-set radius of the point. This field replaces the deprecated
   * LatLongPoints.
   * @return value or {@code null} for none
   */
  public java.util.List<MerchantLocation> getMerchantLocations() {
    return merchantLocations;
  }

  /**
   * Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations
   * added beyond the 10 will be rejected. These locations will trigger a notification when a user
   * enters within a Google-set radius of the point. This field replaces the deprecated
   * LatLongPoints.
   * @param merchantLocations merchantLocations or {@code null} for none
   */
  public FlightObject setMerchantLocations(java.util.List<MerchantLocation> merchantLocations) {
    this.merchantLocations = merchantLocations;
    return this;
  }

  /**
   * An array of messages displayed in the app. All users of this object will receive its associated
   * messages. The maximum number of these fields is 10.
   * @return value or {@code null} for none
   */
  public java.util.List<Message> getMessages() {
    return messages;
  }

  /**
   * An array of messages displayed in the app. All users of this object will receive its associated
   * messages. The maximum number of these fields is 10.
   * @param messages messages or {@code null} for none
   */
  public FlightObject setMessages(java.util.List<Message> messages) {
    this.messages = messages;
    return this;
  }

  /**
   * Whether or not field updates to this object should trigger notifications. When set to NOTIFY,
   * we will attempt to trigger a field update notification to users. These notifications will only
   * be sent to users if the field is part of an allowlist. If set to DO_NOT_NOTIFY or
   * NOTIFICATION_SETTINGS_UNSPECIFIED, no notification will be triggered. This setting is ephemeral
   * and needs to be set with each PATCH or UPDATE request, otherwise a notification will not be
   * triggered.
   * @return value or {@code null} for none
   */
  public java.lang.String getNotifyPreference() {
    return notifyPreference;
  }
/**
* Whether or not field updates to this object should trigger notifications. When set to NOTIFY,
* we will attempt to trigger a field update notification to users. These notifications will only
* be sent to users if the field is part of an allowlist. If set to DO_NOT_NOTIFY or
* NOTIFICATION_SETTINGS_UNSPECIFIED, no notification will be triggered. This setting is ephemeral
* and needs to be set with each PATCH or UPDATE request, otherwise a notification will not be
* triggered.
* @param notifyPreference notifyPreference or {@code null} for none
*/
public FlightObject setNotifyPreference(java.lang.String notifyPreference) {
this.notifyPreference = notifyPreference;
return this;
}
/**
* Pass constraints for the object. Includes limiting NFC and screenshot behaviors.
* @return value or {@code null} for none
*/
public PassConstraints getPassConstraints() {
return passConstraints;
}
/**
* Pass constraints for the object. Includes limiting NFC and screenshot behaviors.
* @param passConstraints passConstraints or {@code null} for none
*/
public FlightObject setPassConstraints(PassConstraints passConstraints) {
this.passConstraints = passConstraints;
return this;
}
/**
* Required. Passenger name as it would appear on the boarding pass. eg: "Dave M Gahan" or
* "Gahan/Dave" or "GAHAN/DAVEM"
* @return value or {@code null} for none
*/
public java.lang.String getPassengerName() {
return passengerName;
}
/**
* Required. Passenger name as it would appear on the boarding pass. eg: "Dave M Gahan" or
* "Gahan/Dave" or "GAHAN/DAVEM"
* @param passengerName passengerName or {@code null} for none
*/
public FlightObject setPassengerName(java.lang.String passengerName) {
this.passengerName = passengerName;
return this;
}
/**
* Required. Information about flight reservation.
* @return value or {@code null} for none
*/
public ReservationInfo getReservationInfo() {
return reservationInfo;
}
/**
* Required. Information about flight reservation.
* @param reservationInfo reservationInfo or {@code null} for none
*/
public FlightObject setReservationInfo(ReservationInfo reservationInfo) {
this.reservationInfo = reservationInfo;
return this;
}
/**
* The rotating barcode type and value.
* @return value or {@code null} for none
*/
public RotatingBarcode getRotatingBarcode() {
return rotatingBarcode;
}
/**
* The rotating barcode type and value.
* @param rotatingBarcode rotatingBarcode or {@code null} for none
*/
public FlightObject setRotatingBarcode(RotatingBarcode rotatingBarcode) {
this.rotatingBarcode = rotatingBarcode;
return this;
}
/**
* Restrictions on the object that needs to be verified before the user tries to save the pass.
* Note that this restrictions will only be applied during save time. If the restrictions changed
* after a user saves the pass, the new restrictions will not be applied to an already saved pass.
* @return value or {@code null} for none
*/
public SaveRestrictions getSaveRestrictions() {
return saveRestrictions;
}
/**
* Restrictions on the object that needs to be verified before the user tries to save the pass.
* Note that this restrictions will only be applied during save time. If the restrictions changed
* after a user saves the pass, the new restrictions will not be applied to an already saved pass.
* @param saveRestrictions saveRestrictions or {@code null} for none
*/
public FlightObject setSaveRestrictions(SaveRestrictions saveRestrictions) {
this.saveRestrictions = saveRestrictions;
return this;
}
/**
* An image for the security program that applies to the passenger.
* @return value or {@code null} for none
*/
public Image getSecurityProgramLogo() {
return securityProgramLogo;
}
/**
* An image for the security program that applies to the passenger.
* @param securityProgramLogo securityProgramLogo or {@code null} for none
*/
public FlightObject setSecurityProgramLogo(Image securityProgramLogo) {
this.securityProgramLogo = securityProgramLogo;
return this;
}
/**
* The value that will be transmitted to a Smart Tap certified terminal over NFC for this object.
* The class level fields `enableSmartTap` and `redemptionIssuers` must also be set up correctly
* in order for the pass to support Smart Tap. Only ASCII characters are supported.
* @return value or {@code null} for none
*/
public java.lang.String getSmartTapRedemptionValue() {
return smartTapRedemptionValue;
}
/**
* The value that will be transmitted to a Smart Tap certified terminal over NFC for this object.
* The class level fields `enableSmartTap` and `redemptionIssuers` must also be set up correctly
* in order for the pass to support Smart Tap. Only ASCII characters are supported.
* @param smartTapRedemptionValue smartTapRedemptionValue or {@code null} for none
*/
public FlightObject setSmartTapRedemptionValue(java.lang.String smartTapRedemptionValue) {
this.smartTapRedemptionValue = smartTapRedemptionValue;
return this;
}
/**
* Required. The state of the object. This field is used to determine how an object is displayed
* in the app. For example, an `inactive` object is moved to the "Expired passes" section.
* @return value or {@code null} for none
*/
public java.lang.String getState() {
return state;
}
/**
* Required. The state of the object. This field is used to determine how an object is displayed
* in the app. For example, an `inactive` object is moved to the "Expired passes" section.
* @param state state or {@code null} for none
*/
public FlightObject setState(java.lang.String state) {
this.state = state;
return this;
}
/**
* Text module data. If text module data is also defined on the class, both will be displayed. The
* maximum number of these fields displayed is 10 from the object and 10 from the class.
* @return value or {@code null} for none
*/
public java.util.List<TextModuleData> getTextModulesData() {
return textModulesData;
}
/**
* Text module data. If text module data is also defined on the class, both will be displayed. The
* maximum number of these fields displayed is 10 from the object and 10 from the class.
* @param textModulesData textModulesData or {@code null} for none
*/
public FlightObject setTextModulesData(java.util.List<TextModuleData> textModulesData) {
this.textModulesData = textModulesData;
return this;
}
/**
* The time period this object will be `active` and object can be used. An object's state will be
* changed to `expired` when this time period has passed.
* @return value or {@code null} for none
*/
public TimeInterval getValidTimeInterval() {
return validTimeInterval;
}
/**
* The time period this object will be `active` and object can be used. An object's state will be
* changed to `expired` when this time period has passed.
* @param validTimeInterval validTimeInterval or {@code null} for none
*/
public FlightObject setValidTimeInterval(TimeInterval validTimeInterval) {
this.validTimeInterval = validTimeInterval;
return this;
}
/**
* Optional value added module data. Maximum of ten on the object.
* @return value or {@code null} for none
*/
public java.util.List<ValueAddedModuleData> getValueAddedModuleData() {
return valueAddedModuleData;
}
/**
* Optional value added module data. Maximum of ten on the object.
* @param valueAddedModuleData valueAddedModuleData or {@code null} for none
*/
public FlightObject setValueAddedModuleData(java.util.List<ValueAddedModuleData> valueAddedModuleData) {
this.valueAddedModuleData = valueAddedModuleData;
return this;
}
/**
* Deprecated
* @return value or {@code null} for none
*/
public java.lang.Long getVersion() {
return version;
}
/**
* Deprecated
* @param version version or {@code null} for none
*/
public FlightObject setVersion(java.lang.Long version) {
this.version = version;
return this;
}
@Override
public FlightObject set(String fieldName, Object value) {
return (FlightObject) super.set(fieldName, value);
}
@Override
public FlightObject clone() {
return (FlightObject) super.clone();
}
}
|
apache/commons-lang | 36,034 | src/main/java/org/apache/commons/lang3/builder/CompareToBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.builder;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.Comparator;
import java.util.Objects;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.ObjectUtils;
/**
* Assists in implementing {@link Comparable#compareTo(Object)} methods.
*
* <p>It is consistent with {@code equals(Object)} and
* {@code hashCode()} built with {@link EqualsBuilder} and
* {@link HashCodeBuilder}.</p>
*
* <p>Two Objects that compare equal using {@code equals(Object)} should normally
* also compare equal using {@code compareTo(Object)}.</p>
*
* <p>All relevant fields should be included in the calculation of the
* comparison. Derived fields may be ignored. The same fields, in the same
* order, should be used in both {@code compareTo(Object)} and
* {@code equals(Object)}.</p>
*
* <p>To use this class write code as follows:</p>
*
* <pre>
* public class MyClass {
* String field1;
* int field2;
* boolean field3;
*
* ...
*
* public int compareTo(Object o) {
* MyClass myClass = (MyClass) o;
* return new CompareToBuilder()
 *       .appendSuper(super.compareTo(o))
* .append(this.field1, myClass.field1)
* .append(this.field2, myClass.field2)
* .append(this.field3, myClass.field3)
* .toComparison();
* }
* }
* </pre>
*
* <p>Values are compared in the order they are appended to the builder. If any comparison returns
* a non-zero result, then that value will be the result returned by {@code toComparison()} and all
* subsequent comparisons are skipped.</p>
*
* <p>Alternatively, there are {@link #reflectionCompare(Object, Object) reflectionCompare} methods that use
* reflection to determine the fields to append. Because fields can be private,
* {@code reflectionCompare} uses {@link java.lang.reflect.AccessibleObject#setAccessible(boolean)} to
* bypass normal access control checks. This will fail under a security manager,
* unless the appropriate permissions are set up correctly. It is also
* slower than appending explicitly.</p>
*
* <p>A typical implementation of {@code compareTo(Object)} using
* {@code reflectionCompare} looks like:</p>
* <pre>
* public int compareTo(Object o) {
* return CompareToBuilder.reflectionCompare(this, o);
* }
* </pre>
*
* <p>The reflective methods compare object fields in the order returned by
* {@link Class#getDeclaredFields()}. The fields of the class are compared first, followed by those
* of its parent classes (in order from the bottom to the top of the class hierarchy).</p>
*
* @see Comparable
* @see Object#equals(Object)
* @see Object#hashCode()
* @see EqualsBuilder
* @see HashCodeBuilder
* @since 1.0
*/
public class CompareToBuilder implements Builder<Integer> {
/**
* Appends to {@code builder} the comparison of {@code lhs}
* to {@code rhs} using the fields defined in {@code clazz}.
*
* @param lhs left-hand side object
* @param rhs right-hand side object
* @param clazz {@link Class} that defines fields to be compared
* @param builder {@link CompareToBuilder} to append to
* @param useTransients whether to compare transient fields
* @param excludeFields fields to exclude
*/
private static void reflectionAppend(
final Object lhs,
final Object rhs,
final Class<?> clazz,
final CompareToBuilder builder,
final boolean useTransients,
final String[] excludeFields) {
final Field[] fields = clazz.getDeclaredFields();
AccessibleObject.setAccessible(fields, true);
for (int i = 0; i < fields.length && builder.comparison == 0; i++) {
final Field field = fields[i];
if (!ArrayUtils.contains(excludeFields, field.getName())
&& !field.getName().contains("$")
&& (useTransients || !Modifier.isTransient(field.getModifiers()))
&& !Modifier.isStatic(field.getModifiers())) {
// IllegalAccessException can't happen. Would get a Security exception instead.
// Throw a runtime exception in case the impossible happens.
builder.append(Reflection.getUnchecked(field, lhs), Reflection.getUnchecked(field, rhs));
}
}
}
/**
* Compares two {@link Object}s via reflection.
*
* <p>Fields can be private, thus {@code AccessibleObject.setAccessible}
* is used to bypass normal access control checks. This will fail under a
* security manager unless the appropriate permissions are set.</p>
*
* <ul>
* <li>Static fields will not be compared</li>
* <li>Transient members will be not be compared, as they are likely derived
* fields</li>
* <li>Superclass fields will be compared</li>
* </ul>
*
* <p>If both {@code lhs} and {@code rhs} are {@code null},
* they are considered equal.</p>
*
* @param lhs left-hand side object
* @param rhs right-hand side object
* @return a negative integer, zero, or a positive integer as {@code lhs}
* is less than, equal to, or greater than {@code rhs}
* @throws NullPointerException if either (but not both) parameters are
* {@code null}
* @throws ClassCastException if {@code rhs} is not assignment-compatible
* with {@code lhs}
*/
public static int reflectionCompare(final Object lhs, final Object rhs) {
return reflectionCompare(lhs, rhs, false, null);
}
/**
* Compares two {@link Object}s via reflection.
*
* <p>Fields can be private, thus {@code AccessibleObject.setAccessible}
* is used to bypass normal access control checks. This will fail under a
* security manager unless the appropriate permissions are set.</p>
*
* <ul>
* <li>Static fields will not be compared</li>
* <li>If {@code compareTransients} is {@code true},
* compares transient members. Otherwise ignores them, as they
* are likely derived fields.</li>
* <li>Superclass fields will be compared</li>
* </ul>
*
* <p>If both {@code lhs} and {@code rhs} are {@code null},
* they are considered equal.</p>
*
* @param lhs left-hand side object
* @param rhs right-hand side object
* @param compareTransients whether to compare transient fields
* @return a negative integer, zero, or a positive integer as {@code lhs}
* is less than, equal to, or greater than {@code rhs}
* @throws NullPointerException if either {@code lhs} or {@code rhs}
* (but not both) is {@code null}
* @throws ClassCastException if {@code rhs} is not assignment-compatible
* with {@code lhs}
*/
public static int reflectionCompare(final Object lhs, final Object rhs, final boolean compareTransients) {
return reflectionCompare(lhs, rhs, compareTransients, null);
}
/**
* Compares two {@link Object}s via reflection.
*
* <p>Fields can be private, thus {@code AccessibleObject.setAccessible}
* is used to bypass normal access control checks. This will fail under a
* security manager unless the appropriate permissions are set.</p>
*
* <ul>
* <li>Static fields will not be compared</li>
* <li>If the {@code compareTransients} is {@code true},
* compares transient members. Otherwise ignores them, as they
* are likely derived fields.</li>
* <li>Compares superclass fields up to and including {@code reflectUpToClass}.
* If {@code reflectUpToClass} is {@code null}, compares all superclass fields.</li>
* </ul>
*
* <p>If both {@code lhs} and {@code rhs} are {@code null},
* they are considered equal.</p>
*
* @param lhs left-hand side object
* @param rhs right-hand side object
* @param compareTransients whether to compare transient fields
* @param reflectUpToClass last superclass for which fields are compared
* @param excludeFields fields to exclude
* @return a negative integer, zero, or a positive integer as {@code lhs}
* is less than, equal to, or greater than {@code rhs}
* @throws NullPointerException if either {@code lhs} or {@code rhs}
* (but not both) is {@code null}
* @throws ClassCastException if {@code rhs} is not assignment-compatible
* with {@code lhs}
* @since 2.2 (2.0 as {@code reflectionCompare(Object, Object, boolean, Class)})
*/
public static int reflectionCompare(
final Object lhs,
final Object rhs,
final boolean compareTransients,
final Class<?> reflectUpToClass,
final String... excludeFields) {
if (lhs == rhs) {
return 0;
}
Objects.requireNonNull(lhs, "lhs");
Objects.requireNonNull(rhs, "rhs");
Class<?> lhsClazz = lhs.getClass();
if (!lhsClazz.isInstance(rhs)) {
throw new ClassCastException();
}
final CompareToBuilder compareToBuilder = new CompareToBuilder();
reflectionAppend(lhs, rhs, lhsClazz, compareToBuilder, compareTransients, excludeFields);
while (lhsClazz.getSuperclass() != null && lhsClazz != reflectUpToClass) {
lhsClazz = lhsClazz.getSuperclass();
reflectionAppend(lhs, rhs, lhsClazz, compareToBuilder, compareTransients, excludeFields);
}
return compareToBuilder.toComparison();
}
/**
* Compares two {@link Object}s via reflection.
*
* <p>Fields can be private, thus {@code AccessibleObject.setAccessible}
* is used to bypass normal access control checks. This will fail under a
* security manager unless the appropriate permissions are set.</p>
*
* <ul>
* <li>Static fields will not be compared</li>
* <li>If {@code compareTransients} is {@code true},
* compares transient members. Otherwise ignores them, as they
* are likely derived fields.</li>
* <li>Superclass fields will be compared</li>
* </ul>
*
* <p>If both {@code lhs} and {@code rhs} are {@code null},
* they are considered equal.</p>
*
* @param lhs left-hand side object
* @param rhs right-hand side object
* @param excludeFields Collection of String fields to exclude
* @return a negative integer, zero, or a positive integer as {@code lhs}
* is less than, equal to, or greater than {@code rhs}
* @throws NullPointerException if either {@code lhs} or {@code rhs}
* (but not both) is {@code null}
* @throws ClassCastException if {@code rhs} is not assignment-compatible
* with {@code lhs}
* @since 2.2
*/
public static int reflectionCompare(final Object lhs, final Object rhs, final Collection<String> excludeFields) {
return reflectionCompare(lhs, rhs, ReflectionToStringBuilder.toNoNullStringArray(excludeFields));
}
/**
* Compares two {@link Object}s via reflection.
*
* <p>Fields can be private, thus {@code AccessibleObject.setAccessible}
* is used to bypass normal access control checks. This will fail under a
* security manager unless the appropriate permissions are set.</p>
*
* <ul>
* <li>Static fields will not be compared</li>
* <li>If {@code compareTransients} is {@code true},
* compares transient members. Otherwise ignores them, as they
* are likely derived fields.</li>
* <li>Superclass fields will be compared</li>
* </ul>
*
* <p>If both {@code lhs} and {@code rhs} are {@code null},
* they are considered equal.</p>
*
* @param lhs left-hand side object
* @param rhs right-hand side object
* @param excludeFields array of fields to exclude
* @return a negative integer, zero, or a positive integer as {@code lhs}
* is less than, equal to, or greater than {@code rhs}
* @throws NullPointerException if either {@code lhs} or {@code rhs}
* (but not both) is {@code null}
* @throws ClassCastException if {@code rhs} is not assignment-compatible
* with {@code lhs}
* @since 2.2
*/
public static int reflectionCompare(final Object lhs, final Object rhs, final String... excludeFields) {
return reflectionCompare(lhs, rhs, false, null, excludeFields);
}
/**
* Current state of the comparison as appended fields are checked.
*/
private int comparison;
/**
* Constructor for CompareToBuilder.
*
* <p>Starts off assuming that the objects are equal. Multiple calls are
* then made to the various append methods, followed by a call to
* {@link #toComparison} to get the result.</p>
*/
public CompareToBuilder() {
comparison = 0;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code booleans}s.
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final boolean lhs, final boolean rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs) {
comparison = 1;
} else {
comparison = -1;
}
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code boolean} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(boolean, boolean)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final boolean[] lhs, final boolean[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code byte}s.
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final byte lhs, final byte rhs) {
if (comparison != 0) {
return this;
}
comparison = Byte.compare(lhs, rhs);
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code byte} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(byte, byte)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final byte[] lhs, final byte[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code char}s.
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final char lhs, final char rhs) {
if (comparison != 0) {
return this;
}
comparison = Character.compare(lhs, rhs);
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code char} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(char, char)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final char[] lhs, final char[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code double}s.
*
* <p>This handles NaNs, Infinities, and {@code -0.0}.</p>
*
* <p>It is compatible with the hash code generated by
* {@link HashCodeBuilder}.</p>
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final double lhs, final double rhs) {
if (comparison != 0) {
return this;
}
comparison = Double.compare(lhs, rhs);
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code double} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(double, double)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final double[] lhs, final double[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code float}s.
*
* <p>This handles NaNs, Infinities, and {@code -0.0}.</p>
*
* <p>It is compatible with the hash code generated by
* {@link HashCodeBuilder}.</p>
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final float lhs, final float rhs) {
if (comparison != 0) {
return this;
}
comparison = Float.compare(lhs, rhs);
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code float} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(float, float)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final float[] lhs, final float[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code int}s.
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final int lhs, final int rhs) {
if (comparison != 0) {
return this;
}
comparison = Integer.compare(lhs, rhs);
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code int} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(int, int)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final int[] lhs, final int[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code long}s.
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final long lhs, final long rhs) {
if (comparison != 0) {
return this;
}
comparison = Long.compare(lhs, rhs);
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code long} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(long, long)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final long[] lhs, final long[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
    /**
     * Appends to the {@code builder} the comparison of
     * two {@link Object}s, using natural ordering.
     *
     * <ol>
     * <li>Check if {@code lhs == rhs}</li>
     * <li>Check if either {@code lhs} or {@code rhs} is {@code null},
     *     a {@code null} object is less than a non-{@code null} object</li>
     * <li>Check the object contents</li>
     * </ol>
     *
     * <p>{@code lhs} must either be an array or implement {@link Comparable}.</p>
     *
     * @param lhs left-hand side object
     * @param rhs right-hand side object
     * @return {@code this} instance.
     * @throws ClassCastException if {@code rhs} is not assignment-compatible
     *  with {@code lhs}
     */
    public CompareToBuilder append(final Object lhs, final Object rhs) {
        // Delegate to the comparator-aware overload; null comparator means
        // "treat lhs as Comparable".
        return append(lhs, rhs, null);
    }
/**
* Appends to the {@code builder} the comparison of
* two {@link Object}s.
*
* <ol>
* <li>Check if {@code lhs == rhs}</li>
* <li>Check if either {@code lhs} or {@code rhs} is {@code null},
* a {@code null} object is less than a non-{@code null} object</li>
* <li>Check the object contents</li>
* </ol>
*
* <p>If {@code lhs} is an array, array comparison methods will be used.
* Otherwise {@code comparator} will be used to compare the objects.
* If {@code comparator} is {@code null}, {@code lhs} must
* implement {@link Comparable} instead.</p>
*
* @param lhs left-hand side object
* @param rhs right-hand side object
* @param comparator {@link Comparator} used to compare the objects,
* {@code null} means treat lhs as {@link Comparable}
* @return {@code this} instance.
* @throws ClassCastException if {@code rhs} is not assignment-compatible
* with {@code lhs}
* @since 2.0
*/
public CompareToBuilder append(final Object lhs, final Object rhs, final Comparator<?> comparator) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (ObjectUtils.isArray(lhs)) {
// factor out array case in order to keep method small enough to be inlined
appendArray(lhs, rhs, comparator);
} else // the simple case, not an array, just test the element
if (comparator == null) {
@SuppressWarnings("unchecked") // assume this can be done; if not throw CCE as per Javadoc
final Comparable<Object> comparable = (Comparable<Object>) lhs;
comparison = comparable.compareTo(rhs);
} else {
@SuppressWarnings("unchecked") // assume this can be done; if not throw CCE as per Javadoc
final Comparator<Object> comparator2 = (Comparator<Object>) comparator;
comparison = comparator2.compare(lhs, rhs);
}
return this;
}
    /**
     * Appends to the {@code builder} the deep comparison of
     * two {@link Object} arrays, using natural ordering of the elements.
     *
     * <ol>
     *  <li>Check if arrays are the same using {@code ==}</li>
     *  <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
     *  <li>Check array length, a short length array is less than a long length array</li>
     *  <li>Check array contents element by element using {@link #append(Object, Object, Comparator)}</li>
     * </ol>
     *
     * <p>This method will also will be called for the top level of multi-dimensional,
     * ragged, and multi-typed arrays.</p>
     *
     * @param lhs left-hand side array
     * @param rhs right-hand side array
     * @return {@code this} instance.
     * @throws ClassCastException if {@code rhs} is not assignment-compatible
     *  with {@code lhs}
     */
    public CompareToBuilder append(final Object[] lhs, final Object[] rhs) {
        // Delegate to the comparator-aware overload; null comparator means
        // "treat elements as Comparable".
        return append(lhs, rhs, null);
    }
/**
* Appends to the {@code builder} the deep comparison of
* two {@link Object} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a short length array is less than a long length array</li>
* <li>Check array contents element by element using {@link #append(Object, Object, Comparator)}</li>
* </ol>
*
* <p>This method will also will be called for the top level of multi-dimensional,
* ragged, and multi-typed arrays.</p>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @param comparator {@link Comparator} to use to compare the array elements,
* {@code null} means to treat {@code lhs} elements as {@link Comparable}.
* @return {@code this} instance.
* @throws ClassCastException if {@code rhs} is not assignment-compatible
* with {@code lhs}
* @since 2.0
*/
public CompareToBuilder append(final Object[] lhs, final Object[] rhs, final Comparator<?> comparator) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i], comparator);
}
return this;
}
/**
* Appends to the {@code builder} the comparison of
* two {@code short}s.
*
* @param lhs left-hand side value
* @param rhs right-hand side value
* @return {@code this} instance.
*/
public CompareToBuilder append(final short lhs, final short rhs) {
if (comparison != 0) {
return this;
}
comparison = Short.compare(lhs, rhs);
return this;
}
/**
* Appends to the {@code builder} the deep comparison of
* two {@code short} arrays.
*
* <ol>
* <li>Check if arrays are the same using {@code ==}</li>
* <li>Check if for {@code null}, {@code null} is less than non-{@code null}</li>
* <li>Check array length, a shorter length array is less than a longer length array</li>
* <li>Check array contents element by element using {@link #append(short, short)}</li>
* </ol>
*
* @param lhs left-hand side array
* @param rhs right-hand side array
* @return {@code this} instance.
*/
public CompareToBuilder append(final short[] lhs, final short[] rhs) {
if (comparison != 0) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null) {
comparison = -1;
return this;
}
if (rhs == null) {
comparison = 1;
return this;
}
if (lhs.length != rhs.length) {
comparison = lhs.length < rhs.length ? -1 : 1;
return this;
}
for (int i = 0; i < lhs.length && comparison == 0; i++) {
append(lhs[i], rhs[i]);
}
return this;
}
private void appendArray(final Object lhs, final Object rhs, final Comparator<?> comparator) {
// switch on type of array, to dispatch to the correct handler
// handles multidimensional arrays
// throws a ClassCastException if rhs is not the correct array type
if (lhs instanceof long[]) {
append((long[]) lhs, (long[]) rhs);
} else if (lhs instanceof int[]) {
append((int[]) lhs, (int[]) rhs);
} else if (lhs instanceof short[]) {
append((short[]) lhs, (short[]) rhs);
} else if (lhs instanceof char[]) {
append((char[]) lhs, (char[]) rhs);
} else if (lhs instanceof byte[]) {
append((byte[]) lhs, (byte[]) rhs);
} else if (lhs instanceof double[]) {
append((double[]) lhs, (double[]) rhs);
} else if (lhs instanceof float[]) {
append((float[]) lhs, (float[]) rhs);
} else if (lhs instanceof boolean[]) {
append((boolean[]) lhs, (boolean[]) rhs);
} else {
// not an array of primitives
// throws a ClassCastException if rhs is not an array
append((Object[]) lhs, (Object[]) rhs, comparator);
}
}
/**
* Appends to the {@code builder} the {@code compareTo(Object)}
* result of the superclass.
*
* @param superCompareTo result of calling {@code super.compareTo(Object)}
* @return {@code this} instance.
* @since 2.0
*/
public CompareToBuilder appendSuper(final int superCompareTo) {
if (comparison != 0) {
return this;
}
comparison = superCompareTo;
return this;
}
/**
* Returns a negative Integer, a positive Integer, or zero as
* the {@code builder} has judged the "left-hand" side
* as less than, greater than, or equal to the "right-hand"
* side.
*
* @return final comparison result as an Integer
* @see #toComparison()
* @since 3.0
*/
@Override
public Integer build() {
return Integer.valueOf(toComparison());
}
    /**
     * Returns a negative integer, a positive integer, or zero as
     * the {@code builder} has judged the "left-hand" side
     * as less than, greater than, or equal to the "right-hand"
     * side.
     *
     * @return final comparison result
     * @see #build()
     */
    public int toComparison() {
        // Simple accessor for the accumulated comparison state.
        return comparison;
    }
}
|
apache/hadoop | 35,968 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/AMRMProxyService.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.amrmproxy;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.ContainerType;
import org.apache.hadoop.yarn.server.federation.utils.FederationStateStoreFacade;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.RecoveredAMRMProxyState;
import org.apache.hadoop.yarn.server.nodemanager.scheduler.DistributedScheduler;
import org.apache.hadoop.yarn.server.nodemanager.security.authorize
.NMPolicyProvider;
import org.apache.hadoop.yarn.server.security.MasterKeyData;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.utils.YarnServerSecurityUtils;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.MonotonicClock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.util.Preconditions;
/**
* AMRMProxyService is a service that runs on each node manager that can be used
* to intercept and inspect messages from application master to the cluster
* resource manager. It listens to messages from the application master and
* creates a request intercepting pipeline instance for each application. The
* pipeline is a chain of interceptor instances that can inspect and modify the
* request/response as needed.
*/
public class AMRMProxyService extends CompositeService implements
ApplicationMasterProtocol {
  private static final Logger LOG = LoggerFactory
      .getLogger(AMRMProxyService.class);
  // Keys for the per-application entries persisted in the NM state store;
  // written in initializePipeline() and read back in recover().
  private static final String NMSS_USER_KEY = "user";
  private static final String NMSS_AMRMTOKEN_KEY = "amrmtoken";
  // Monotonic clock used only for latency metrics; immune to wall-clock jumps.
  private final Clock clock = new MonotonicClock();
  // RPC server that AMs on this node talk to in place of the real RM.
  private Server server;
  private final Context nmContext;
  private final AsyncDispatcher dispatcher;
  private InetSocketAddress listenerEndpoint;
  // Issues and validates the local (proxy-minted) AMRMTokens handed to AMs.
  private AMRMProxyTokenSecretManager secretManager;
  // One interceptor pipeline per application, keyed by ApplicationId.
  private Map<ApplicationId, RequestInterceptorChainWrapper> applPipelineMap;
  // Optional YARN registry client, created only when AMRMProxy HA is enabled.
  private RegistryOperations registry;
  private AMRMProxyMetrics metrics;
  private FederationStateStoreFacade federationFacade;
  private boolean federationEnabled = false;
  /**
   * Creates an instance of the service.
   *
   * @param nmContext NM context, must not be {@code null}
   * @param dispatcher NM dispatcher, must not be {@code null}
   * @throws IllegalArgumentException if either argument is {@code null}
   */
  public AMRMProxyService(Context nmContext, AsyncDispatcher dispatcher) {
    super(AMRMProxyService.class.getName());
    Preconditions.checkArgument(nmContext != null, "nmContext is null");
    Preconditions.checkArgument(dispatcher != null, "dispatcher is null");
    this.nmContext = nmContext;
    this.dispatcher = dispatcher;
    this.applPipelineMap = new ConcurrentHashMap<>();
    // Listen for application lifecycle events so finished applications get
    // their interceptor pipelines torn down.
    this.dispatcher.register(ApplicationEventType.class, new ApplicationEventHandler());
    metrics = AMRMProxyMetrics.getMetrics();
  }
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    // The secret manager is backed by the NM state store so that issued local
    // AMRMTokens survive an NM restart (see recover()).
    this.secretManager =
        new AMRMProxyTokenSecretManager(this.nmContext.getNMStateStore());
    this.secretManager.init(conf);
    // Create (and let CompositeService manage) a registry client only when
    // AMRMProxy HA is enabled.
    if (conf.getBoolean(YarnConfiguration.AMRM_PROXY_HA_ENABLED,
        YarnConfiguration.DEFAULT_AMRM_PROXY_HA_ENABLED)) {
      this.registry = FederationStateStoreFacade.createInstance(conf,
          YarnConfiguration.YARN_REGISTRY_CLASS,
          YarnConfiguration.DEFAULT_YARN_REGISTRY_CLASS,
          RegistryOperations.class);
      addService(this.registry);
    }
    this.federationFacade = FederationStateStoreFacade.getInstance(conf);
    this.federationEnabled =
        conf.getBoolean(YarnConfiguration.FEDERATION_ENABLED,
            YarnConfiguration.DEFAULT_FEDERATION_ENABLED);
    super.serviceInit(conf);
  }
  @Override
  protected void serviceStart() throws Exception {
    LOG.info("Starting AMRMProxyService.");
    Configuration conf = getConfig();
    YarnRPC rpc = YarnRPC.create(conf);
    UserGroupInformation.setConfiguration(conf);
    this.listenerEndpoint =
        conf.getSocketAddr(YarnConfiguration.AMRM_PROXY_ADDRESS,
            YarnConfiguration.DEFAULT_AMRM_PROXY_ADDRESS,
            YarnConfiguration.DEFAULT_AMRM_PROXY_PORT);
    // Force token-based auth for this server regardless of cluster-wide
    // settings: AMs always authenticate with the locally-issued AMRMToken.
    Configuration serverConf = new Configuration(conf);
    serverConf.set(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
        SaslRpcServer.AuthMethod.TOKEN.toString());
    int numWorkerThreads =
        serverConf.getInt(
            YarnConfiguration.AMRM_PROXY_CLIENT_THREAD_COUNT,
            YarnConfiguration.DEFAULT_AMRM_PROXY_CLIENT_THREAD_COUNT);
    // Start the secret manager before the server so incoming connections can
    // be authenticated immediately.
    this.secretManager.start();
    this.server =
        rpc.getServer(ApplicationMasterProtocol.class, this,
            listenerEndpoint, serverConf, this.secretManager,
            numWorkerThreads);
    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false)) {
      this.server.refreshServiceAcl(conf, NMPolicyProvider.getInstance());
    }
    this.server.start();
    LOG.info("AMRMProxyService listening on address: {}.", this.server.getListenerAddress());
    super.serviceStart();
  }
@Override
protected void serviceStop() throws Exception {
LOG.info("Stopping AMRMProxyService.");
if (this.server != null) {
this.server.stop();
}
this.secretManager.stop();
super.serviceStop();
}
  /**
   * Recover from NM state store. Called after serviceInit before serviceStart.
   *
   * <p>For each persisted application attempt this restores the saved user and
   * RM-issued AMRMToken, mints a fresh local token, locates the AM container's
   * credentials in the NM context, and rebuilds the interceptor pipeline. An
   * attempt that fails to recover is removed from the state store and skipped
   * rather than failing the whole service.</p>
   *
   * @throws IOException if recover fails
   */
  public void recover() throws IOException {
    LOG.info("Recovering AMRMProxyService.");
    RecoveredAMRMProxyState state =
        this.nmContext.getNMStateStore().loadAMRMProxyState();
    this.secretManager.recover(state);
    LOG.info("Recovering {} running applications for AMRMProxy.",
        state.getAppContexts().size());
    for (Map.Entry<ApplicationAttemptId, Map<String, byte[]>> entry : state
        .getAppContexts().entrySet()) {
      ApplicationAttemptId attemptId = entry.getKey();
      LOG.info("Recovering app attempt {}.", attemptId);
      long startTime = clock.getTime();
      // Try recover for the running application attempt
      try {
        // Pull the persisted user and RM AMRMToken out of the saved
        // key/value context entries (written by initializePipeline()).
        String user = null;
        Token<AMRMTokenIdentifier> amrmToken = null;
        for (Map.Entry<String, byte[]> contextEntry : entry.getValue()
            .entrySet()) {
          if (contextEntry.getKey().equals(NMSS_USER_KEY)) {
            user = new String(contextEntry.getValue(), StandardCharsets.UTF_8);
          } else if (contextEntry.getKey().equals(NMSS_AMRMTOKEN_KEY)) {
            amrmToken = new Token<>();
            amrmToken.decodeFromUrlString(
                new String(contextEntry.getValue(), StandardCharsets.UTF_8));
            // Clear the service field, as if RM just issued the token
            amrmToken.setService(new Text());
          }
        }
        if (amrmToken == null) {
          throw new IOException("No amrmToken found for app attempt " + attemptId);
        }
        if (user == null) {
          throw new IOException("No user found for app attempt " + attemptId);
        }
        // Regenerate the local AMRMToken for the AM
        Token<AMRMTokenIdentifier> localToken =
            this.secretManager.createAndGetAMRMToken(attemptId);
        // Retrieve the AM container credentials from NM context
        Credentials amCred = null;
        for (Container container : this.nmContext.getContainers().values()) {
          LOG.debug("From NM Context container {}.", container.getContainerId());
          if (container.getContainerId().getApplicationAttemptId().equals(
              attemptId) && container.getContainerTokenIdentifier() != null) {
            LOG.debug("Container type {}.",
                container.getContainerTokenIdentifier().getContainerType());
            if (container.getContainerTokenIdentifier()
                .getContainerType() == ContainerType.APPLICATION_MASTER) {
              LOG.info("AM container {} found in context, has credentials: {}.",
                  container.getContainerId(),
                  (container.getCredentials() != null));
              amCred = container.getCredentials();
            }
          }
        }
        if (amCred == null) {
          // Best effort: recovery proceeds without credentials, but registry
          // access from interceptors may not work.
          LOG.error("No credentials found for AM container of {}. "
              + "Yarn registry access might not work.", attemptId);
        }
        // Create the interceptor pipeline for the AM
        initializePipeline(attemptId, user, amrmToken, localToken,
            entry.getValue(), true, amCred);
        long endTime = clock.getTime();
        this.metrics.succeededRecoverRequests(endTime - startTime);
      } catch (Throwable e) {
        // Deliberately swallow per-attempt failures so one corrupt entry does
        // not prevent recovery of the remaining applications.
        LOG.error("Exception when recovering {}, removing it from NMStateStore and move on.",
            attemptId, e);
        this.metrics.incrFailedAppRecoveryCount();
        this.nmContext.getNMStateStore().removeAMRMProxyAppContext(attemptId);
      }
    }
  }
  /**
   * This is called by the AMs started on this node to register with the RM.
   * This method does the initial authorization and then forwards the request to
   * the application instance specific interceptor chain.
   *
   * @param request AM registration request (host, port, tracking URL)
   * @return the response produced by the interceptor chain
   * @throws YarnException if authorization or the chain invocation fails
   * @throws IOException on I/O failure in the chain
   */
  @Override
  public RegisterApplicationMasterResponse registerApplicationMaster(
      RegisterApplicationMasterRequest request) throws YarnException,
      IOException {
    this.metrics.incrRequestCount();
    long startTime = clock.getTime();
    try {
      // Validates the caller's AMRMToken and resolves its pipeline.
      RequestInterceptorChainWrapper pipeline =
          authorizeAndGetInterceptorChain();
      LOG.info("RegisteringAM Host: {}, Port: {}, Tracking Url: {} for application {}. ",
          request.getHost(), request.getRpcPort(), request.getTrackingUrl(),
          pipeline.getApplicationAttemptId());
      RegisterApplicationMasterResponse response =
          pipeline.getRootInterceptor().registerApplicationMaster(request);
      long endTime = clock.getTime();
      this.metrics.succeededRegisterAMRequests(endTime - startTime);
      LOG.info("RegisterAM processing finished in {} ms for application {}.",
          endTime - startTime, pipeline.getApplicationAttemptId());
      return response;
    } catch (Throwable t) {
      // Count the failure, then propagate unchanged to the RPC layer.
      this.metrics.incrFailedRegisterAMRequests();
      throw t;
    }
  }
  /**
   * This is called by the AMs started on this node to unregister from the RM.
   * This method does the initial authorization and then forwards the request to
   * the application instance specific interceptor chain.
   *
   * @param request AM unregistration request
   * @return the response produced by the interceptor chain
   * @throws YarnException if authorization or the chain invocation fails
   * @throws IOException on I/O failure in the chain
   */
  @Override
  public FinishApplicationMasterResponse finishApplicationMaster(
      FinishApplicationMasterRequest request) throws YarnException,
      IOException {
    this.metrics.incrRequestCount();
    long startTime = clock.getTime();
    try {
      // Validates the caller's AMRMToken and resolves its pipeline.
      RequestInterceptorChainWrapper pipeline =
          authorizeAndGetInterceptorChain();
      LOG.info("Finishing application master for {}. Tracking Url: {}.",
          pipeline.getApplicationAttemptId(), request.getTrackingUrl());
      FinishApplicationMasterResponse response =
          pipeline.getRootInterceptor().finishApplicationMaster(request);
      long endTime = clock.getTime();
      this.metrics.succeededFinishAMRequests(endTime - startTime);
      LOG.info("FinishAM finished with isUnregistered = {} in {} ms for {}.",
          response.getIsUnregistered(), endTime - startTime,
          pipeline.getApplicationAttemptId());
      return response;
    } catch (Throwable t) {
      // Count the failure, then propagate unchanged to the RPC layer.
      this.metrics.incrFailedFinishAMRequests();
      throw t;
    }
  }
  /**
   * This is called by the AMs started on this node to send heart beat to RM.
   * This method does the initial authorization and then forwards the request to
   * the application instance specific pipeline, which is a chain of request
   * interceptor objects. One application request processing pipeline is created
   * per AM instance.
   *
   * @param request AM heartbeat / allocation request
   * @return the allocate response, with any rolled-over local AMRMToken attached
   * @throws YarnException if authorization or the chain invocation fails
   * @throws IOException on I/O failure in the chain
   */
  @Override
  public AllocateResponse allocate(AllocateRequest request)
      throws YarnException, IOException {
    this.metrics.incrAllocateCount();
    long startTime = clock.getTime();
    try {
      AMRMTokenIdentifier amrmTokenIdentifier =
          YarnServerSecurityUtils.authorizeRequest();
      RequestInterceptorChainWrapper pipeline =
          getInterceptorChain(amrmTokenIdentifier);
      AllocateResponse allocateResponse =
          pipeline.getRootInterceptor().allocate(request);
      // Handle RM master-key rollover and local-token rollover before
      // returning the response to the AM.
      updateAMRMTokens(amrmTokenIdentifier, pipeline, allocateResponse);
      long endTime = clock.getTime();
      this.metrics.succeededAllocateRequests(endTime - startTime);
      LOG.info("Allocate processing finished in {} ms for application {}.",
          endTime - startTime, pipeline.getApplicationAttemptId());
      return allocateResponse;
    } catch (Throwable t) {
      // Count the failure, then propagate unchanged to the RPC layer.
      this.metrics.incrFailedAllocateRequests();
      throw t;
    }
  }
  /**
   * Callback from the ContainerManager implementation for initializing the
   * application request processing pipeline.
   *
   * <p>Replaces the RM-issued AMRMToken in the AM container's credentials with
   * a locally-minted one, so the AM transparently talks to this proxy instead
   * of the RM, then creates the interceptor pipeline for the attempt.</p>
   *
   * @param request - encapsulates information for starting an AM
   * @throws IOException if fails
   * @throws YarnException if fails
   */
  public void processApplicationStartRequest(StartContainerRequest request)
      throws IOException, YarnException {
    this.metrics.incrRequestCount();
    long startTime = clock.getTime();
    try {
      ContainerTokenIdentifier containerTokenIdentifierForKey =
          BuilderUtils.newContainerTokenIdentifier(request.getContainerToken());
      ApplicationAttemptId appAttemptId =
          containerTokenIdentifierForKey.getContainerID()
              .getApplicationAttemptId();
      ApplicationId applicationID = appAttemptId.getApplicationId();
      // Checking if application is there in federation state store only
      // if federation is enabled. If
      // application is submitted to router then it adds it in statestore.
      // if application is not found in statestore that means its
      // submitted to RM
      if (!checkIfAppExistsInStateStore(applicationID)) {
        return;
      }
      LOG.info("Callback received for initializing request processing pipeline for an AM.");
      Credentials credentials = YarnServerSecurityUtils
          .parseCredentials(request.getContainerLaunchContext());
      Token<AMRMTokenIdentifier> amrmToken =
          getFirstAMRMToken(credentials.getAllTokens());
      if (amrmToken == null) {
        throw new YarnRuntimeException(
            "AMRMToken not found in the start container request for application:" + appAttemptId);
      }
      // Substitute the existing AMRM Token with a local one. Keep the rest of
      // the tokens in the credentials intact.
      Token<AMRMTokenIdentifier> localToken =
          this.secretManager.createAndGetAMRMToken(appAttemptId);
      credentials.addToken(localToken.getService(), localToken);
      // Re-serialize the modified credentials back into the launch context.
      DataOutputBuffer dob = new DataOutputBuffer();
      credentials.writeTokenStorageToStream(dob);
      request.getContainerLaunchContext()
          .setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
      initializePipeline(appAttemptId,
          containerTokenIdentifierForKey.getApplicationSubmitter(), amrmToken,
          localToken, null, false, credentials);
      long endTime = clock.getTime();
      this.metrics.succeededAppStartRequests(endTime - startTime);
    } catch (Throwable t) {
      // Count the failure, then propagate unchanged to the caller.
      this.metrics.incrFailedAppStartRequests();
      throw t;
    }
  }
  /**
   * Initializes the request interceptor pipeline for the specified application.
   *
   * <p>The pipeline wrapper is registered in {@code applPipelineMap} under the
   * map's monitor first, then the (potentially expensive) chain initialization
   * runs outside the lock. On any initialization failure the map entry is
   * removed again.</p>
   *
   * @param applicationAttemptId attempt id
   * @param user user name
   * @param amrmToken amrmToken issued by RM
   * @param localToken amrmToken issued by AMRMProxy
   * @param recoveredDataMap the recovered states for AMRMProxy from NMSS;
   *        must be non-null when {@code isRecovery} is true
   * @param isRecovery whether this is to recover a previously existing pipeline
   * @param credentials AM container credentials, may be null during recovery
   */
  protected void initializePipeline(ApplicationAttemptId applicationAttemptId,
      String user, Token<AMRMTokenIdentifier> amrmToken,
      Token<AMRMTokenIdentifier> localToken,
      Map<String, byte[]> recoveredDataMap, boolean isRecovery,
      Credentials credentials) {
    RequestInterceptorChainWrapper chainWrapper = null;
    synchronized (applPipelineMap) {
      if (applPipelineMap
          .containsKey(applicationAttemptId.getApplicationId())) {
        LOG.warn("Request to start an already existing appId was received. "
            + " This can happen if an application failed and a new attempt "
            + "was created on this machine.  ApplicationId: {}.", applicationAttemptId);
        RequestInterceptorChainWrapper chainWrapperBackup =
            this.applPipelineMap.get(applicationAttemptId.getApplicationId());
        if (chainWrapperBackup != null
            && chainWrapperBackup.getApplicationAttemptId() != null
            && !chainWrapperBackup.getApplicationAttemptId()
                .equals(applicationAttemptId)) {
          // TODO: revisit in AMRMProxy HA in YARN-6128
          // Remove the existing pipeline
          LOG.info("Remove the previous pipeline for ApplicationId: {}.", applicationAttemptId);
          RequestInterceptorChainWrapper pipeline =
              applPipelineMap.remove(applicationAttemptId.getApplicationId());
          if (!isRecovery && this.nmContext.getNMStateStore() != null) {
            try {
              this.nmContext.getNMStateStore()
                  .removeAMRMProxyAppContext(applicationAttemptId);
            } catch (IOException ioe) {
              // Best effort: a stale NMSS entry is tolerated.
              LOG.error("Error removing AMRMProxy application context for {}.",
                  applicationAttemptId, ioe);
            }
          }
          try {
            pipeline.getRootInterceptor().shutdown();
          } catch (Throwable ex) {
            // Best effort: continue replacing the pipeline even if the old
            // chain fails to shut down cleanly.
            LOG.warn("Failed to shutdown the request processing pipeline for app: {}.",
                applicationAttemptId.getApplicationId(), ex);
          }
        } else {
          // Same attempt already has a pipeline; nothing to do.
          return;
        }
      }
      chainWrapper = new RequestInterceptorChainWrapper();
      this.applPipelineMap.put(applicationAttemptId.getApplicationId(),
          chainWrapper);
    }
    // We register the pipeline instance in the map first and then initialize it
    // later because chain initialization can be expensive, and we would like to
    // release the lock as soon as possible to prevent other applications from
    // blocking when one application's chain is initializing
    LOG.info("Initializing request processing pipeline for application. "
        + " ApplicationId: {} for the user: {}.", applicationAttemptId, user);
    try {
      RequestInterceptor interceptorChain =
          this.createRequestInterceptorChain();
      interceptorChain.init(
          createApplicationMasterContext(this.nmContext, applicationAttemptId,
              user, amrmToken, localToken, credentials, this.registry));
      if (isRecovery) {
        if (recoveredDataMap == null) {
          throw new YarnRuntimeException("null recoveredDataMap received for recover");
        }
        interceptorChain.recover(recoveredDataMap);
      }
      chainWrapper.init(interceptorChain, applicationAttemptId);
      // Persist user and token so the pipeline can be rebuilt after an NM
      // restart (see recover()); skipped during recovery itself.
      if (!isRecovery && this.nmContext.getNMStateStore() != null) {
        try {
          this.nmContext.getNMStateStore().storeAMRMProxyAppContextEntry(
              applicationAttemptId, NMSS_USER_KEY, user.getBytes(StandardCharsets.UTF_8));
          this.nmContext.getNMStateStore().storeAMRMProxyAppContextEntry(
              applicationAttemptId, NMSS_AMRMTOKEN_KEY,
              amrmToken.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
        } catch (IOException e) {
          // Best effort: the pipeline still works for this NM lifetime, but
          // will not be recoverable after a restart.
          LOG.error("Error storing AMRMProxy application context entry for {}.",
              applicationAttemptId, e);
        }
      }
    } catch (Exception e) {
      // Roll back the map registration so a failed init does not leave a
      // half-constructed pipeline behind.
      this.applPipelineMap.remove(applicationAttemptId.getApplicationId());
      throw e;
    }
  }
  /**
   * Shuts down the request processing pipeline for the specified application
   * attempt id.
   *
   * <p>Removes the pipeline from the map, unregisters the attempt from the
   * secret manager, shuts down the interceptor chain, and finally removes the
   * persisted NMSS context. Failures in any step are logged and reflected in
   * the metrics but never thrown.</p>
   *
   * @param applicationId application id, must not be {@code null}
   */
  protected void stopApplication(ApplicationId applicationId) {
    this.metrics.incrRequestCount();
    Preconditions.checkArgument(applicationId != null, "applicationId is null");
    RequestInterceptorChainWrapper pipeline =
        this.applPipelineMap.remove(applicationId);
    boolean isStopSuccess = true;
    long startTime = clock.getTime();
    if (pipeline == null) {
      // Normal case when the AM of this application ran on a different node.
      LOG.info("No interceptor pipeline for application {},"
          + " likely because its AM is not run in this node.", applicationId);
      isStopSuccess = false;
    } else {
      // Remove the appAttempt in AMRMTokenSecretManager
      this.secretManager.applicationMasterFinished(pipeline.getApplicationAttemptId());
      LOG.info("Stopping the request processing pipeline for application: {}.", applicationId);
      try {
        pipeline.getRootInterceptor().shutdown();
      } catch (Throwable ex) {
        LOG.warn("Failed to shutdown the request processing pipeline for app: {}.",
            applicationId, ex);
        isStopSuccess = false;
      }
      // Remove the app context from NMSS after the interceptors are shutdown
      if (this.nmContext.getNMStateStore() != null) {
        try {
          this.nmContext.getNMStateStore()
              .removeAMRMProxyAppContext(pipeline.getApplicationAttemptId());
        } catch (IOException e) {
          LOG.error("Error removing AMRMProxy application context for {}.",
              applicationId, e);
          isStopSuccess = false;
        }
      }
    }
    if (isStopSuccess) {
      long endTime = clock.getTime();
      this.metrics.succeededAppStopRequests(endTime - startTime);
    } else {
      this.metrics.incrFailedAppStopRequests();
    }
  }
  /**
   * Updates both AMRMTokens after an allocate round trip: absorbs any new
   * RM-issued token into the context (never forwarding it to the AM), and if
   * the proxy's own master key has rolled over, issues a fresh local token and
   * attaches it to the response for the AM to pick up.
   *
   * @param amrmTokenIdentifier identifier of the local token the AM used for this call
   * @param pipeline the application's interceptor pipeline wrapper
   * @param allocateResponse the response being returned to the AM; mutated in place
   */
  private void updateAMRMTokens(AMRMTokenIdentifier amrmTokenIdentifier,
      RequestInterceptorChainWrapper pipeline,
      AllocateResponse allocateResponse) {
    AMRMProxyApplicationContextImpl context =
        (AMRMProxyApplicationContextImpl) pipeline.getRootInterceptor().getApplicationContext();
    try {
      long startTime = clock.getTime();
      // check to see if the RM has issued a new AMRMToken & accordingly update
      // the real ARMRMToken in the current context
      if (allocateResponse.getAMRMToken() != null) {
        LOG.info("RM rolled master-key for amrm-tokens.");
        org.apache.hadoop.yarn.api.records.Token token = allocateResponse.getAMRMToken();
        // Do not propagate this info back to AM
        allocateResponse.setAMRMToken(null);
        org.apache.hadoop.security.token.Token<AMRMTokenIdentifier> newToken =
            ConverterUtils.convertFromYarn(token, (Text) null);
        // Update the AMRMToken in context map, and in NM state store if it is
        // different
        if (context.setAMRMToken(newToken) && this.nmContext.getNMStateStore() != null) {
          this.nmContext.getNMStateStore().storeAMRMProxyAppContextEntry(
              context.getApplicationAttemptId(), NMSS_AMRMTOKEN_KEY,
              newToken.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
        }
      }
      // Check if the local AMRMToken is rolled up and update the context and
      // response accordingly
      MasterKeyData nextMasterKey = this.secretManager.getNextMasterKeyData();
      if (nextMasterKey != null) {
        MasterKey masterKey = nextMasterKey.getMasterKey();
        if (masterKey.getKeyId() != amrmTokenIdentifier.getKeyId()) {
          Token<AMRMTokenIdentifier> localToken = context.getLocalAMRMToken();
          // Only mint a new local token once per rollover; subsequent
          // heartbeats re-send the already-updated token.
          if (masterKey.getKeyId() != context.getLocalAMRMTokenKeyId()) {
            LOG.info("The local AMRMToken has been rolled-over."
                + " Send new local AMRMToken back to application: {}",
                pipeline.getApplicationId());
            localToken = this.secretManager.createAndGetAMRMToken(
                pipeline.getApplicationAttemptId());
            context.setLocalAMRMToken(localToken);
          }
          allocateResponse
              .setAMRMToken(org.apache.hadoop.yarn.api.records.Token
                  .newInstance(localToken.getIdentifier(), localToken
                      .getKind().toString(), localToken.getPassword(),
                      localToken.getService().toString()));
        }
      }
      long endTime = clock.getTime();
      this.metrics.succeededUpdateTokenRequests(endTime - startTime);
    } catch (IOException e) {
      // Best effort: token bookkeeping failure is logged and counted but must
      // not fail the allocate call itself.
      LOG.error("Error storing AMRMProxy application context entry for {}.",
          context.getApplicationAttemptId(), e);
      this.metrics.incrFailedUpdateAMRMTokenRequests();
    }
  }
private AMRMProxyApplicationContext createApplicationMasterContext(
Context context, ApplicationAttemptId applicationAttemptId, String user,
Token<AMRMTokenIdentifier> amrmToken,
Token<AMRMTokenIdentifier> localToken, Credentials credentials,
RegistryOperations registryImpl) {
AMRMProxyApplicationContextImpl appContext =
new AMRMProxyApplicationContextImpl(context, getConfig(),
applicationAttemptId, user, amrmToken, localToken, credentials,
registryImpl);
return appContext;
}
/**
* Gets the Request interceptor chains for all the applications.
*
* @return the request interceptor chains.
*/
protected Map<ApplicationId, RequestInterceptorChainWrapper> getPipelines() {
return this.applPipelineMap;
}
/**
* This method creates and returns reference of the first interceptor in the
* chain of request interceptor instances.
*
* @return the reference of the first interceptor in the chain
*/
protected RequestInterceptor createRequestInterceptorChain() {
Configuration conf = getConfig();
List<String> interceptorClassNames = getInterceptorClassNames(conf);
RequestInterceptor pipeline = null;
RequestInterceptor current = null;
for (String interceptorClassName : interceptorClassNames) {
try {
Class<?> interceptorClass =
conf.getClassByName(interceptorClassName);
if (RequestInterceptor.class.isAssignableFrom(interceptorClass)) {
RequestInterceptor interceptorInstance =
(RequestInterceptor) ReflectionUtils.newInstance(
interceptorClass, conf);
if (pipeline == null) {
pipeline = interceptorInstance;
current = interceptorInstance;
continue;
} else {
current.setNextInterceptor(interceptorInstance);
current = interceptorInstance;
}
} else {
throw new YarnRuntimeException("Class: " + interceptorClassName
+ " not instance of "
+ RequestInterceptor.class.getCanonicalName());
}
} catch (ClassNotFoundException e) {
throw new YarnRuntimeException(
"Could not instantiate ApplicationMasterRequestInterceptor: "
+ interceptorClassName, e);
}
}
if (pipeline == null) {
throw new YarnRuntimeException(
"RequestInterceptor pipeline is not configured in the system");
}
return pipeline;
}
/**
* Returns the comma separated interceptor class names from the configuration.
*
* @param conf configuration
* @return the interceptor class names as an instance of ArrayList
*/
private List<String> getInterceptorClassNames(Configuration conf) {
String configuredInterceptorClassNames =
conf.get(
YarnConfiguration.AMRM_PROXY_INTERCEPTOR_CLASS_PIPELINE,
YarnConfiguration.DEFAULT_AMRM_PROXY_INTERCEPTOR_CLASS_PIPELINE);
List<String> interceptorClassNames = new ArrayList<>();
Collection<String> tempList =
StringUtils.getStringCollection(configuredInterceptorClassNames);
for (String item : tempList) {
interceptorClassNames.add(item.trim());
}
// Make sure DistributedScheduler is present at the beginning of the chain.
if (this.nmContext.isDistributedSchedulingEnabled()) {
interceptorClassNames.add(0, DistributedScheduler.class.getName());
}
return interceptorClassNames;
}
/**
* Authorizes the request and returns the application specific request
* processing pipeline.
*
* @return the interceptor wrapper instance
* @throws YarnException if fails
*/
private RequestInterceptorChainWrapper authorizeAndGetInterceptorChain()
throws YarnException {
AMRMTokenIdentifier tokenIdentifier =
YarnServerSecurityUtils.authorizeRequest();
return getInterceptorChain(tokenIdentifier);
}
private RequestInterceptorChainWrapper getInterceptorChain(
AMRMTokenIdentifier tokenIdentifier) throws YarnException {
ApplicationAttemptId appAttemptId =
tokenIdentifier.getApplicationAttemptId();
synchronized (this.applPipelineMap) {
if (!this.applPipelineMap.containsKey(appAttemptId.getApplicationId())) {
throw new YarnException(
"The AM request processing pipeline is not initialized for app: "
+ appAttemptId.getApplicationId());
}
return this.applPipelineMap.get(appAttemptId.getApplicationId());
}
}
boolean checkIfAppExistsInStateStore(ApplicationId applicationID) {
if (!federationEnabled) {
return true;
}
try {
// Check if app is there in state store. If app is not there then it
// throws Exception
this.federationFacade.getApplicationHomeSubCluster(applicationID);
} catch (YarnException ex) {
return false;
}
return true;
}
@SuppressWarnings("unchecked")
private Token<AMRMTokenIdentifier> getFirstAMRMToken(
Collection<Token<? extends TokenIdentifier>> allTokens) {
Iterator<Token<? extends TokenIdentifier>> iter = allTokens.iterator();
while (iter.hasNext()) {
Token<? extends TokenIdentifier> token = iter.next();
if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
return (Token<AMRMTokenIdentifier>) token;
}
}
return null;
}
@Private
public InetSocketAddress getBindAddress() {
return this.listenerEndpoint;
}
@Private
public AMRMProxyTokenSecretManager getSecretManager() {
return this.secretManager;
}
/**
* Private class for handling application stop events.
*/
class ApplicationEventHandler implements EventHandler<ApplicationEvent> {
@Override
public void handle(ApplicationEvent event) {
Application app =
AMRMProxyService.this.nmContext.getApplications().get(event.getApplicationID());
if (app != null) {
switch (event.getType()) {
case APPLICATION_RESOURCES_CLEANEDUP:
LOG.info("Application stop event received for stopping AppId: {}.",
event.getApplicationID().toString());
AMRMProxyService.this.stopApplication(event.getApplicationID());
break;
default:
LOG.debug("AMRMProxy is ignoring event: {}.", event.getType());
break;
}
} else {
LOG.warn("Event {} sent to absent application {}.", event, event.getApplicationID());
}
}
}
/**
* Private structure for encapsulating RequestInterceptor and
* ApplicationAttemptId instances.
*
*/
@Private
public static class RequestInterceptorChainWrapper {
private RequestInterceptor rootInterceptor;
private ApplicationAttemptId applicationAttemptId;
/**
* Initializes the wrapper with the specified parameters.
*
* @param interceptor the root request interceptor
* @param appAttemptId attempt id
*/
public synchronized void init(RequestInterceptor interceptor,
ApplicationAttemptId appAttemptId) {
rootInterceptor = interceptor;
applicationAttemptId = appAttemptId;
}
/**
* Gets the root request interceptor.
*
* @return the root request interceptor
*/
public synchronized RequestInterceptor getRootInterceptor() {
return rootInterceptor;
}
/**
* Gets the application attempt identifier.
*
* @return the application attempt identifier
*/
public synchronized ApplicationAttemptId getApplicationAttemptId() {
return applicationAttemptId;
}
/**
* Gets the application identifier.
*
* @return the application identifier
*/
public synchronized ApplicationId getApplicationId() {
return applicationAttemptId.getApplicationId();
}
}
}
|
apache/tomee | 35,954 | container/openejb-core/src/main/java/org/apache/openejb/assembler/classic/JndiBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.assembler.classic;
import org.apache.openejb.AppContext;
import org.apache.openejb.BeanContext;
import org.apache.openejb.InterfaceType;
import org.apache.openejb.ModuleContext;
import org.apache.openejb.OpenEJBRuntimeException;
import org.apache.openejb.core.ivm.naming.BusinessLocalBeanReference;
import org.apache.openejb.core.ivm.naming.BusinessLocalReference;
import org.apache.openejb.core.ivm.naming.BusinessRemoteReference;
import org.apache.openejb.core.ivm.naming.IntraVmJndiReference;
import org.apache.openejb.core.ivm.naming.ObjectReference;
import org.apache.openejb.loader.Options;
import org.apache.openejb.loader.SystemInstance;
import org.apache.openejb.spi.ContainerSystem;
import org.apache.openejb.util.LogCategory;
import org.apache.openejb.util.Logger;
import org.apache.openejb.util.StringTemplate;
import org.apache.openejb.util.Strings;
import jakarta.ejb.embeddable.EJBContainer;
import jakarta.jms.MessageListener;
import javax.naming.Context;
import javax.naming.NameAlreadyBoundException;
import javax.naming.NamingException;
import javax.naming.Reference;
import java.lang.reflect.Constructor;
import java.rmi.Remote;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import static org.apache.openejb.util.Classes.packageName;
/**
 * Builds and registers the JNDI names for every enterprise bean in an
 * ejb-jar: the proprietary openejb/Deployment, openejb/local and
 * openejb/remote trees plus the spec-mandated java:global/app/module names.
 *
 * @version $Rev$ $Date$
 */
public class JndiBuilder {
    // Key for the fallback name template when none is configured per interface.
    public static final String DEFAULT_NAME_KEY = "default";
    // True when running under the embeddable EJBContainer API; affects logging only.
    final boolean embeddedEjbContainerApi;
    public static final Logger logger = Logger.getInstance(LogCategory.OPENEJB_STARTUP, JndiBuilder.class.getPackage().getName());
    // Compatibility switch: additionally expose beans under their legacy names.
    private static final boolean USE_OLD_JNDI_NAMES = SystemInstance.get().getOptions().get("openejb.use-old-jndi-names", false);
    // Root context every proprietary name is bound under.
    private final Context openejbContext;
    private static final String JNDINAME_STRATEGY_CLASS = "openejb.jndiname.strategy.class";
    private static final String JNDINAME_FAILONCOLLISION = "openejb.jndiname.failoncollision";
    // When true a JNDI name collision aborts deployment instead of only logging.
    private final boolean failOnCollision;
public JndiBuilder(final Context openejbContext) {
this.openejbContext = openejbContext;
final Options options = SystemInstance.get().getOptions();
failOnCollision = options.get(JNDINAME_FAILONCOLLISION, true);
embeddedEjbContainerApi = options.get(EJBContainer.class.getName(), false);
}
public void build(final EjbJarInfo ejbJar, final HashMap<String, BeanContext> deployments) {
final JndiNameStrategy strategy = createStrategy(ejbJar, deployments);
for (final EnterpriseBeanInfo beanInfo : ejbJar.enterpriseBeans) {
final BeanContext beanContext = deployments.get(beanInfo.ejbDeploymentId);
strategy.begin(beanContext);
try {
bind(ejbJar, beanContext, beanInfo, strategy);
} finally {
strategy.end();
}
}
}
public static JndiNameStrategy createStrategy(final EjbJarInfo ejbJar, final Map<String, BeanContext> deployments) {
final Options options = new Options(ejbJar.properties, SystemInstance.get().getOptions());
final Class strategyClass = options.get(JNDINAME_STRATEGY_CLASS, TemplatedStrategy.class);
final String strategyClassName = strategyClass.getName();
try {
try {
final Constructor constructor = strategyClass.getConstructor(EjbJarInfo.class, Map.class);
return (JndiNameStrategy) constructor.newInstance(ejbJar, deployments);
} catch (final NoSuchMethodException e) {
// no-op
}
final Constructor constructor = strategyClass.getConstructor();
return (JndiNameStrategy) constructor.newInstance();
} catch (final InstantiationException e) {
throw new IllegalStateException("Could not instantiate JndiNameStrategy: " + strategyClassName, e);
} catch (final IllegalAccessException e) {
throw new IllegalStateException("Could not access JndiNameStrategy: " + strategyClassName, e);
} catch (final Throwable t) {
throw new IllegalStateException("Could not create JndiNameStrategy: " + strategyClassName, t);
}
}
    /**
     * Pluggable policy that maps a bean view (interface plus view type) to
     * the JNDI name(s) it should be bound under.
     */
    public interface JndiNameStrategy {

        /**
         * The kinds of client views a bean can expose, with the spellings
         * used in annotations, XML descriptors and legacy OpenEJB names.
         */
        enum Interface {
            REMOTE_HOME(InterfaceType.EJB_HOME, "RemoteHome", "home", ""),
            LOCAL_HOME(InterfaceType.EJB_LOCAL_HOME, "LocalHome", "local-home", "Local"),
            BUSINESS_LOCAL(InterfaceType.BUSINESS_LOCAL, "Local", "business-local", "BusinessLocal"),
            LOCALBEAN(InterfaceType.LOCALBEAN, "LocalBean", "localbean", "LocalBean"),
            BUSINESS_REMOTE(InterfaceType.BUSINESS_REMOTE, "Remote", "business-remote", "BusinessRemote"),
            SERVICE_ENDPOINT(InterfaceType.SERVICE_ENDPOINT, "Endpoint", "service-endpoint", "ServiceEndpoint");
            private final InterfaceType type;
            private final String annotatedName;
            private final String xmlName;
            // Camel-cased form of xmlName, derived in the constructor.
            private final String xmlNameCc;
            private final String openejbLegacy;

            Interface(final InterfaceType type, final String annotatedName, final String xmlName, final String openejbLegacy) {
                this.type = type;
                this.annotatedName = annotatedName;
                this.xmlName = xmlName;
                this.xmlNameCc = Strings.camelCase(xmlName);
                this.openejbLegacy = openejbLegacy;
            }

            public InterfaceType getType() {
                return type;
            }

            public String getAnnotationName() {
                return annotatedName;
            }

            public String getXmlName() {
                return xmlName;
            }

            public String getXmlNameCc() {
                return xmlNameCc;
            }

            public String getOpenejbLegacy() {
                return openejbLegacy;
            }
        }

        // Called once per bean before getName/getNames; pairs with end().
        void begin(BeanContext beanContext);

        // Resolves a single JNDI name for the given view.
        String getName(Class interfce, String key, Interface type);

        // Resolves all configured names (keyed by context: default/local/global/app).
        Map<String, String> getNames(Class interfce, Interface type);

        void end();
    }
    // TODO: put these into the classpath and get them with xbean-finder
    /**
     * Default naming strategy: expands a configurable template (the
     * openejb.jndiname.format property) against per-app, per-module and
     * per-bean variables such as {deploymentId} and
     * {interfaceType.annotationName}.
     */
    public static class TemplatedStrategy implements JndiNameStrategy {
        private static final String JNDINAME_FORMAT = "openejb.jndiname.format";
        // The name "contexts" produced by getNames().
        private static final String KEYS = "default,local,global,app";
        // Template parsed from the configured (or default) format string.
        private final StringTemplate template;
        private final HashMap<String, EnterpriseBeanInfo> beanInfos;
        // Set in begin()
        private BeanContext bean;
        // Set in begin()
        private HashMap<String, Map<String, StringTemplate>> templates;
        private String format;
        // Variables shared by all beans of the app/module; built in the constructor.
        private Map<String, String> appContext;
        // appContext plus per-bean variables; rebuilt in begin().
        private HashMap<String, String> beanContext;

        public TemplatedStrategy(final EjbJarInfo ejbJarInfo, final Map<String, BeanContext> deployments) {
            final Options options = new Options(ejbJarInfo.properties, SystemInstance.get().getOptions());
            format = options.get(JNDINAME_FORMAT, "{deploymentId}{interfaceType.annotationName}");
            { // illegal format check
                final int index = format.indexOf(':');
                if (index > -1) {
                    logger.error("Illegal " + JNDINAME_FORMAT + " contains a colon ':'. Everything before the colon will be removed, '" + format + "' ");
                    format = format.substring(index + 1);
                }
            }
            this.template = new StringTemplate(format);
            beanInfos = new HashMap<>();
            for (final EnterpriseBeanInfo beanInfo : ejbJarInfo.enterpriseBeans) {
                beanInfos.put(beanInfo.ejbDeploymentId, beanInfo);
            }
            final Iterator<BeanContext> it = deployments.values().iterator();
            if (!it.hasNext()) {
                // No deployments: appContext stays null; begin() is never
                // expected to be called in that case.
                return;
            }
            // TODO we should just pass in the ModuleContext
            final ModuleContext moduleContext = it.next().getModuleContext();
            appContext = new HashMap<>();
            // Later putAll calls override earlier ones: system < app < module.
            putAll(appContext, SystemInstance.get().getProperties());
            putAll(appContext, moduleContext.getAppContext().getProperties());
            putAll(appContext, moduleContext.getProperties());
            appContext.put("appName", moduleContext.getAppContext().getId());
            appContext.put("appId", moduleContext.getAppContext().getId());
            appContext.put("moduleName", moduleContext.getId());
            appContext.put("moduleId", moduleContext.getId());
        }

        // Copies only String->String entries from the properties into the map.
        private void putAll(final Map<String, String> map, final Properties properties) {
            for (final Map.Entry<Object, Object> e : properties.entrySet()) {
                if (!(e.getValue() instanceof String)) {
                    continue;
                }
                if (!(e.getKey() instanceof String)) {
                    continue;
                }
                map.put((String) e.getKey(), (String) e.getValue());
            }
        }

        // Adds key->template to the map, lazily creating a sorted map.
        private Map<String, StringTemplate> addTemplate(final Map<String, StringTemplate> map, final String key, final StringTemplate template) {
            Map<String, StringTemplate> m = map;
            if (m == null) {
                m = new TreeMap<>();
            }
            m.put(key, template);
            return m;
        }

        /**
         * Prepares per-bean template tables and variables. Also consumes
         * (clears) any explicit jndi names configured on the bean info,
         * turning them into per-interface templates.
         */
        public void begin(final BeanContext bean) {
            this.bean = bean;
            final EnterpriseBeanInfo beanInfo = beanInfos.get(bean.getDeploymentID());
            templates = new HashMap<>();
            templates.put("", addTemplate(null, DEFAULT_NAME_KEY, template));
            for (final JndiNameInfo nameInfo : beanInfo.jndiNamess) {
                String intrface = nameInfo.intrface;
                if (intrface == null) {
                    intrface = "";
                }
                templates.put(intrface, addTemplate(templates.get(intrface), getType(nameInfo.name), new StringTemplate(nameInfo.name)));
            }
            beanInfo.jndiNames.clear();
            beanInfo.jndiNamess.clear();
            this.beanContext = new HashMap<>(appContext);
            putAll(this.beanContext, bean.getProperties());
            this.beanContext.put("ejbType", bean.getComponentType().name());
            this.beanContext.put("ejbClass", bean.getBeanClass().getName());
            this.beanContext.put("ejbClass.simpleName", bean.getBeanClass().getSimpleName());
            this.beanContext.put("ejbClass.packageName", packageName(bean.getBeanClass()));
            this.beanContext.put("ejbName", bean.getEjbName());
            this.beanContext.put("deploymentId", bean.getDeploymentID().toString());
        }

        // Extracts the leading path segment of a configured jndi name, used
        // as the template key; the whole name is the key default otherwise.
        // NOTE(review): assumes name is non-empty — confirm callers guarantee it.
        private static String getType(final String name) {
            int start = 0;
            if (name.charAt(0) == '/') {
                start = 1;
            }
            final int end = name.substring(start).indexOf('/');
            if (end < 0) {
                return DEFAULT_NAME_KEY;
            }
            return name.substring(start, end);
        }

        public void end() {
        }

        /**
         * Resolves the name for one view: picks the template table by
         * interface class name, then by view type, then the default; inside
         * the table picks by key, then the default key, then any entry.
         */
        public String getName(final Class interfce, final String key, final Interface type) {
            Map<String, StringTemplate> template = templates.get(interfce.getName());
            if (template == null) {
                template = templates.get(type.getAnnotationName());
            }
            if (template == null) {
                template = templates.get("");
            }
            final Map<String, String> contextData = new HashMap<>(beanContext);
            contextData.put("interfaceType", type.getAnnotationName());
            contextData.put("interfaceType.annotationName", type.getAnnotationName());
            contextData.put("interfaceType.annotationNameLC", type.getAnnotationName().toLowerCase());
            contextData.put("interfaceType.xmlName", type.getXmlName());
            contextData.put("interfaceType.xmlNameCc", type.getXmlNameCc());
            contextData.put("interfaceType.openejbLegacyName", type.getOpenejbLegacy());
            contextData.put("interfaceClass", interfce.getName());
            contextData.put("interfaceClass.simpleName", interfce.getSimpleName());
            contextData.put("interfaceClass.packageName", packageName(interfce));
            StringTemplate stringTemplate = null;
            if (template.containsKey(key)) {
                stringTemplate = template.get(key);
            } else {
                stringTemplate = template.get(DEFAULT_NAME_KEY);
            }
            if (stringTemplate == null) {
                stringTemplate = template.values().iterator().next();
            }
            return stringTemplate.apply(contextData);
        }

        @Override
        public Map<String, String> getNames(final Class interfce, final Interface type) {
            final Map<String, String> names = new HashMap<>();
            for (final String key : KEYS.split(",")) {
                names.put(key, getName(interfce, key, type));
            }
            return names;
        }
    }
public static class LegacyAddedSuffixStrategy implements JndiNameStrategy {
private BeanContext beanContext;
public void begin(final BeanContext beanContext) {
this.beanContext = beanContext;
}
public void end() {
}
public String getName(final Class interfce, final String key, final Interface type) {
String id = String.valueOf(beanContext.getDeploymentID());
if (id.charAt(0) == '/') {
id = id.substring(1);
}
switch (type) {
case REMOTE_HOME:
return id;
case LOCAL_HOME:
return id + "Local";
case BUSINESS_LOCAL:
return id + "BusinessLocal";
case BUSINESS_REMOTE:
return id + "BusinessRemote";
}
return id;
}
@Override
public Map<String, String> getNames(final Class interfce, final Interface type) {
final Map<String, String> names = new HashMap<>();
names.put("", getName(interfce, DEFAULT_NAME_KEY, type));
return names;
}
}
    /**
     * Binds every JNDI view of the given bean: the proprietary
     * openejb/Deployment, openejb/local and openejb/remote trees plus the
     * spec-mandated java:global/app/module names (via bindJava).
     *
     * @throws OpenEJBRuntimeException if any mandatory binding fails
     */
    public void bind(final EjbJarInfo ejbJarInfo, final BeanContext bean, final EnterpriseBeanInfo beanInfo, final JndiNameStrategy strategy) {
        // in an ear ejbmodule, webmodule etc can get the same name so avoid Comp binding issue
        // and we shouldn't need it
        if (BeanContext.Comp.class.equals(bean.getBeanClass())) {
            return;
        }
        final Bindings bindings = new Bindings();
        bean.set(Bindings.class, bindings);
        Reference simpleNameRef = null;
        final Object id = bean.getDeploymentID();
        // Our openejb.jndiname.format concept works such that there doesn't need to be one explicit jndi name
        // for each view that the bean may offer. If the user configured a name that results in few possible
        // jndi names than views, this is ok. The 'optionalBind' method will do its best and log the results.
        // This openejb.jndiname.format affects only the OpenEJB-specific global jndi tree.
        //
        // Should there be a so described "deficit" of names, we give precedence to the most universal and local first
        // Essentially this:
        //     1. Local Bean view as it implements all business interfaces of the bean, local or remote
        //     2. The business local view -- "the" is applicable as create proxies with all possible local interfaces
        //     3. The business remote view -- same note on "the" as above
        //     4. The EJBLocalHome
        //     5. The EJBHome
        //
        // This ordering also has an affect on which view wins the "java:global/{app}/{module}/{ejbName}" jndi name.
        // In the case that the bean has just one view, the name refers to that view. Otherwise, the name is unspecified
        try {
            // 1. @LocalBean (no-interface) view.
            if (bean.isLocalbean()) {
                final Class beanClass = bean.getBeanClass();
                final BeanContext.BusinessLocalBeanHome home = bean.getBusinessLocalBeanHome();
                final BusinessLocalBeanReference ref = new BusinessLocalBeanReference(home);
                optionalBind(bindings, ref, "openejb/Deployment/" + format(id, beanClass.getName(), InterfaceType.LOCALBEAN));
                // if the user inject the EJB using a parent class
                if (!bean.getBeanClass().isInterface()) {
                    for (Class<?> clazz = bean.getBeanClass().getSuperclass(); !clazz.equals(Object.class); clazz = clazz.getSuperclass()) {
                        optionalBind(bindings, ref, "openejb/Deployment/" + format(id, clazz.getName(), InterfaceType.LOCALBEAN));
                    }
                }
                final String internalName = "openejb/Deployment/" + format(id, beanClass.getName(), InterfaceType.BUSINESS_LOCALBEAN_HOME);
                bind(internalName, ref, bindings, beanInfo, beanClass);
                final String name = strategy.getName(beanClass, DEFAULT_NAME_KEY, JndiNameStrategy.Interface.LOCALBEAN);
                bind("openejb/local/" + name, ref, bindings, beanInfo, beanClass);
                bindJava(bean, beanClass, ref, bindings, beanInfo);
                if (USE_OLD_JNDI_NAMES) {
                    bean.getModuleContext().getAppContext().getBindings().put(name, ref);
                }
                simpleNameRef = ref;
            }
        } catch (final NamingException e) {
            throw new OpenEJBRuntimeException("Unable to bind business remote deployment in jndi.", e);
        }
        try {
            // 2. Business local interfaces.
            for (final Class interfce : bean.getBusinessLocalInterfaces()) {
                final BeanContext.BusinessLocalHome home = bean.getBusinessLocalHome(interfce);
                final BusinessLocalReference ref = new BusinessLocalReference(home);
                optionalBind(bindings, ref, "openejb/Deployment/" + format(id, interfce.getName()));
                final String internalName = "openejb/Deployment/" + format(id, interfce.getName(), InterfaceType.BUSINESS_LOCAL);
                bind(internalName, ref, bindings, beanInfo, interfce);
                final String name = strategy.getName(interfce, DEFAULT_NAME_KEY, JndiNameStrategy.Interface.BUSINESS_LOCAL);
                final String externalName = "openejb/local/" + name;
                bind(externalName, ref, bindings, beanInfo, interfce);
                bindJava(bean, interfce, ref, bindings, beanInfo);
                if (USE_OLD_JNDI_NAMES) {
                    bean.getModuleContext().getAppContext().getBindings().put(name, ref);
                }
                if (simpleNameRef == null) {
                    simpleNameRef = ref;
                }
            }
        } catch (final NamingException e) {
            throw new OpenEJBRuntimeException("Unable to bind business local interface for deployment " + id, e);
        }
        try {
            // 3. Business remote interfaces (bound both locally and remotely).
            for (final Class interfce : bean.getBusinessRemoteInterfaces()) {
                final BeanContext.BusinessRemoteHome home = bean.getBusinessRemoteHome(interfce);
                final BusinessRemoteReference ref = new BusinessRemoteReference(home);
                optionalBind(bindings, ref, "openejb/Deployment/" + format(id, interfce.getName(), null));
                final String internalName = "openejb/Deployment/" + format(id, interfce.getName(), InterfaceType.BUSINESS_REMOTE);
                bind(internalName, ref, bindings, beanInfo, interfce);
                final String name = strategy.getName(interfce, DEFAULT_NAME_KEY, JndiNameStrategy.Interface.BUSINESS_REMOTE);
                bind("openejb/local/" + name, ref, bindings, beanInfo, interfce);
                bind("openejb/remote/" + name, ref, bindings, beanInfo, interfce);
                bind("openejb/remote/" + computeGlobalName(bean, interfce), ref, bindings, beanInfo, interfce);
                bindJava(bean, interfce, ref, bindings, beanInfo);
                if (USE_OLD_JNDI_NAMES) {
                    bean.getModuleContext().getAppContext().getBindings().put(name, ref);
                }
                if (simpleNameRef == null) {
                    simpleNameRef = ref;
                }
            }
        } catch (final NamingException e) {
            throw new OpenEJBRuntimeException("Unable to bind business remote deployment in jndi.", e);
        }
        try {
            // 4. EJB 2.x local home view.
            final Class localHomeInterface = bean.getLocalHomeInterface();
            if (localHomeInterface != null) {
                final ObjectReference ref = new ObjectReference(bean.getEJBLocalHome());
                String name = strategy.getName(bean.getLocalHomeInterface(), DEFAULT_NAME_KEY, JndiNameStrategy.Interface.LOCAL_HOME);
                bind("openejb/local/" + name, ref, bindings, beanInfo, localHomeInterface);
                optionalBind(bindings, ref, "openejb/Deployment/" + format(id, localHomeInterface.getName(), InterfaceType.EJB_LOCAL_HOME));
                name = "openejb/Deployment/" + format(id, bean.getLocalInterface().getName());
                bind(name, ref, bindings, beanInfo, localHomeInterface);
                name = "openejb/Deployment/" + format(id, bean.getLocalInterface().getName(), InterfaceType.EJB_LOCAL);
                bind(name, ref, bindings, beanInfo, localHomeInterface);
                bindJava(bean, localHomeInterface, ref, bindings, beanInfo);
                if (simpleNameRef == null) {
                    simpleNameRef = ref;
                }
            }
        } catch (final NamingException e) {
            throw new OpenEJBRuntimeException("Unable to bind local home interface for deployment " + id, e);
        }
        try {
            // 5. EJB 2.x remote home view.
            final Class homeInterface = bean.getHomeInterface();
            if (homeInterface != null) {
                final ObjectReference ref = new ObjectReference(bean.getEJBHome());
                String name = strategy.getName(homeInterface, DEFAULT_NAME_KEY, JndiNameStrategy.Interface.REMOTE_HOME);
                bind("openejb/local/" + name, ref, bindings, beanInfo, homeInterface);
                bind("openejb/remote/" + name, ref, bindings, beanInfo, homeInterface);
                optionalBind(bindings, ref, "openejb/Deployment/" + format(id, homeInterface.getName(), InterfaceType.EJB_HOME));
                name = "openejb/Deployment/" + format(id, bean.getRemoteInterface().getName());
                bind(name, ref, bindings, beanInfo, homeInterface);
                name = "openejb/Deployment/" + format(id, bean.getRemoteInterface().getName(), InterfaceType.EJB_OBJECT);
                bind(name, ref, bindings, beanInfo, homeInterface);
                bindJava(bean, homeInterface, ref, bindings, beanInfo);
                if (simpleNameRef == null) {
                    simpleNameRef = ref;
                }
            }
        } catch (final NamingException e) {
            throw new OpenEJBRuntimeException("Unable to bind remote home interface for deployment " + id, e);
        }
        try {
            // The winning view (per the precedence above) also gets the plain
            // java:global/{app}/{module}/{ejbName} name.
            if (simpleNameRef != null) {
                bindJava(bean, null, simpleNameRef, bindings, beanInfo);
            }
        } catch (final NamingException e) {
            throw new OpenEJBRuntimeException("Unable to bind simple java:global name in jndi", e);
        }
        try {
            // MDBs: expose a link to their destination resource instead.
            if (MessageListener.class.equals(bean.getMdbInterface())) {
                final String destinationId = bean.getDestinationId();
                final String jndiName = "openejb/Resource/" + destinationId;
                final Reference reference = new IntraVmJndiReference(jndiName);
                final String deploymentId = id.toString();
                bind("openejb/local/" + deploymentId, reference, bindings, beanInfo, MessageListener.class);
                bind("openejb/remote/" + deploymentId, reference, bindings, beanInfo, MessageListener.class);
            }
        } catch (final NamingException e) {
            throw new OpenEJBRuntimeException("Unable to bind mdb destination in jndi.", e);
        } catch (final NoClassDefFoundError ncdfe) {
            // no-op: no jms API
        }
    }
private void optionalBind(final Bindings bindings, final Reference ref, final String name) throws NamingException {
try {
openejbContext.bind(name, ref);
logger.debug("bound ejb at name: " + name + ", ref: " + ref);
bindings.add(name);
} catch (final NamingException okIfBindFails) {
logger.debug("failed to bind ejb at name: " + name + ", ref: " + ref);
}
}
    /** Formats a deployment-tree name with no interface-type suffix. */
    public static String format(final Object deploymentId, final String interfaceClassName) {
        return format((String) deploymentId, interfaceClassName, null);
    }

    /** Convenience overload accepting the deployment id as an Object. */
    public static String format(final Object deploymentId, final String interfaceClassName, final InterfaceType interfaceType) {
        return format((String) deploymentId, interfaceClassName, interfaceType);
    }

    /**
     * Builds the "deploymentId/interfaceClass[!specName]" form used under
     * the openejb/Deployment tree.
     */
    public static String format(final String deploymentId, final String interfaceClassName, final InterfaceType interfaceType) {
        return deploymentId + "/" + interfaceClassName + (interfaceType == null ? "" : "!" + interfaceType.getSpecName());
    }
    /**
     * Binds a single name, tracking it in {@code bindings} and recording the
     * externally visible form in {@code beanInfo}. For names under the
     * local/remote/localbean/global prefixes a collision is logged and only
     * rethrown when openejb.jndiname.failoncollision is set; all other names
     * fail hard on collision.
     */
    private void bind(final String name, final Reference ref, final Bindings bindings, final EnterpriseBeanInfo beanInfo, final Class intrface) throws NamingException {
        if (name.startsWith("openejb/local/") || name.startsWith("openejb/remote/") || name.startsWith("openejb/localbean/") || name.startsWith("openejb/global/")) {
            // Strip the proprietary prefix to obtain the user-visible name.
            final String externalName = name.replaceFirst("openejb/[^/]+/", "");
            if (bindings.contains(name)) {
                // We bind under two sections of jndi, only warn once.. the user doesn't need to be bothered with that detail
                if (name.startsWith("openejb/local/")) {
                    logger.debug("Duplicate: Jndi(name=" + externalName + ")");
                }
                return;
            }
            try {
                openejbContext.bind(name, ref);
                bindings.add(name);
                if (!beanInfo.jndiNames.contains(externalName)) {
                    // Record the external name (and its interface) once so
                    // tooling and logs can report what was exposed.
                    beanInfo.jndiNames.add(externalName);
                    final JndiNameInfo nameInfo = new JndiNameInfo();
                    nameInfo.intrface = intrface == null ? null : intrface.getName();
                    nameInfo.name = externalName;
                    beanInfo.jndiNamess.add(nameInfo);
                    if (!embeddedEjbContainerApi
                        // filtering internal bean
                        && !(beanInfo instanceof ManagedBeanInfo && ((ManagedBeanInfo) beanInfo).hidden)) {
                        logger.info("Jndi(name=" + externalName + ") --> Ejb(deployment-id=" + beanInfo.ejbDeploymentId + ")");
                    }
                }
            } catch (final NameAlreadyBoundException e) {
                final BeanContext deployment = findNameOwner(name);
                if (deployment != null) {
                    logger.error("Jndi(name=" + externalName + ") cannot be bound to Ejb(deployment-id=" + beanInfo.ejbDeploymentId + "). Name already taken by Ejb(deployment-id=" + deployment.getDeploymentID() + ")");
                } else {
                    logger.error("Jndi(name=" + externalName + ") cannot be bound to Ejb(deployment-id=" + beanInfo.ejbDeploymentId + "). Name already taken by another object in the system.");
                }
                // Construct a new exception as the IvmContext doesn't include
                // the name in the exception that it throws
                if (failOnCollision) {
                    throw new NameAlreadyBoundException(externalName);
                }
            }
        } else {
            try {
                openejbContext.bind(name, ref);
                logger.debug("bound ejb at name: " + name + ", ref: " + ref);
                bindings.add(name);
            } catch (final NameAlreadyBoundException e) {
                logger.error("Jndi name could not be bound; it may be taken by another ejb. Jndi(name=" + name + ")");
                // Construct a new exception as the IvmContext doesn't include
                // the name in the exception that it throws
                throw new NameAlreadyBoundException(name);
            }
        }
    }
//ee6 specified ejb bindings in module, app, and global contexts
private String computeGlobalName(final BeanContext cdi, final Class<?> intrface) {
final ModuleContext module = cdi.getModuleContext();
final AppContext application = module.getAppContext();
final String appName = application.isStandaloneModule() ? "" : application.getId() + "/";
final String moduleName = moduleName(cdi);
String beanName = cdi.getEjbName();
if (intrface != null) {
beanName = beanName + "!" + intrface.getName();
}
return "global/" + appName + moduleName + beanName;
}
private String moduleName(BeanContext cdi) {
String moduleName = cdi.getModuleName() + "/";
if (moduleName.startsWith("ear-scoped-cdi-beans_")) {
moduleName = moduleName.substring("ear-scoped-cdi-beans_".length());
}
return moduleName;
}
/**
 * Binds the given EJB reference into the EE 6 "java:" namespaces —
 * java:global, java:app and java:module — and mirrors each binding in the
 * application's bindings map so other components of the application can
 * resolve them.
 *
 * @param cdi      the bean being bound
 * @param intrface the view interface, or null for the no-interface view
 * @param ref      the JNDI reference to bind
 * @param bindings collector of names bound for this deployment
 * @param beanInfo descriptor consulted for logging decisions
 * @throws NamingException if a java:app or java:module binding fails
 */
private void bindJava(final BeanContext cdi, final Class intrface, final Reference ref, final Bindings bindings, final EnterpriseBeanInfo beanInfo) throws NamingException {
final ModuleContext module = cdi.getModuleContext();
final AppContext application = module.getAppContext();
final Context moduleContext = module.getModuleJndiContext();
final Context appContext = application.getAppJndiContext();
final Context globalContext = application.getGlobalJndiContext();
final String appName = application.isStandaloneModule() ? "" : application.getId() + "/";
// moduleName(...) ends with '/'; a leading '/' would create a double
// separator in the composed names, so strip it.
String moduleName = moduleName(cdi);
if (moduleName.startsWith("/")) {
moduleName = moduleName.substring(1);
}
String beanName = cdi.getEjbName();
if (intrface != null) {
beanName = beanName + "!" + intrface.getName();
}
final String globalName = "global/" + appName + moduleName + beanName;
try {
// Only advertise the global name for the embedded EJBContainer API and
// never for hidden managed beans.
if (embeddedEjbContainerApi
&& !(beanInfo instanceof ManagedBeanInfo && ((ManagedBeanInfo) beanInfo).hidden)) {
logger.info(String.format("Jndi(name=\"java:%s\")", globalName));
}
globalContext.bind(globalName, ref);
application.getBindings().put(globalName, ref);
bind("openejb/global/" + globalName, ref, bindings, beanInfo, intrface);
} catch (final NameAlreadyBoundException e) {
//one interface in more than one role (e.g. both Local and Remote
// Treated as benign: skip the remaining app/module bindings for this view.
return;
}
appContext.bind("app/" + moduleName + beanName, ref);
application.getBindings().put("app/" + moduleName + beanName, ref);
final String moduleJndi = "module/" + beanName;
moduleContext.bind(moduleJndi, ref);
// contextual if the same ejb (api) is deployed in 2 wars of an ear
// NOTE(review): the cast assumes any existing "module/..." entry in the
// bindings map is a ContextualEjbLookup; a foreign value here would raise
// ClassCastException — confirm no other code writes this key.
ContextualEjbLookup contextual = ContextualEjbLookup.class.cast(application.getBindings().get(moduleJndi));
if (contextual == null) {
final Map<BeanContext, Object> potentials = new HashMap<>();
contextual = new ContextualEjbLookup(potentials, ref);
application.getBindings().put(moduleJndi, contextual); // TODO: we shouldn't do it but use web bindings
}
// Register this bean's ref so lookup can pick by the caller's classloader.
contextual.potentials.put(cdi, ref);
}
/**
 * Finds the deployment that currently owns the given JNDI name by scanning
 * every deployed bean's recorded bindings. Linear in the number of
 * deployments, but only used to build a helpful collision error message.
 *
 * @param name the JNDI name to look up
 * @return the owning BeanContext, or null when no deployment claims it
 */
private BeanContext findNameOwner(final String name) {
    final ContainerSystem containerSystem = SystemInstance.get().getComponent(ContainerSystem.class);
    for (final BeanContext candidate : containerSystem.deployments()) {
        final Bindings owned = candidate.get(Bindings.class);
        if (owned == null) {
            continue;
        }
        if (owned.contains(name)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Per-deployment record of the JNDI names that were bound, used to trace a
 * name collision back to the deployment that owns the name.
 */
protected static final class Bindings {
    private final List<String> names = new ArrayList<>();

    /** @return the live, mutable list of bound names */
    public List<String> getBindings() {
        return names;
    }

    /** Records a bound name; always returns true (List contract). */
    public boolean add(final String o) {
        return names.add(o);
    }

    /** @return true when the given name was recorded for this deployment */
    public boolean contains(final String o) {
        return names.contains(o);
    }
}
/**
 * Orders classes so that non-remote types come first and
 * {@link Remote} types come last; classes on the same side of that divide
 * compare as equal. Note: this ordering is inconsistent with equals, so it
 * is suitable for sorting lists but not for sorted sets or maps.
 */
public static class RemoteInterfaceComparator implements Comparator<Class> {
    public int compare(final Class a, final Class b) {
        // Fixed misspelled locals (aIsRmote/bIsRmote) and replaced the
        // manual three-way branch with the equivalent Boolean.compare:
        // false < true, so remote interfaces sort after local ones.
        final boolean aIsRemote = Remote.class.isAssignableFrom(a);
        final boolean bIsRemote = Remote.class.isAssignableFrom(b);
        return Boolean.compare(aIsRemote, bIsRemote);
    }
}
/**
 * JNDI reference that resolves an EJB view contextually when the same EJB
 * (api) is visible from several wars of one ear: the candidate whose
 * deployment classloader equals the caller's thread context classloader
 * wins, otherwise a default reference is used.
 */
public static class ContextualEjbLookup extends org.apache.openejb.core.ivm.naming.Reference {
// Candidate refs keyed by their deployment; private, but the enclosing
// class keeps registering beans through this map after construction.
private final Map<BeanContext, Object> potentials;
// Fallback used when no candidate matches the caller's classloader.
private final Object defaultValue;
public ContextualEjbLookup(final Map<BeanContext, Object> potentials, final Object defaultValue) {
this.potentials = potentials;
this.defaultValue = defaultValue;
}
/**
 * Resolves the contextually-appropriate candidate, unwrapping nested
 * references to the actual object.
 */
@Override
public Object getObject() throws NamingException {
// Single candidate: no ambiguity, skip the classloader scan.
if (potentials.size() == 1) {
return unwrap(defaultValue);
}
final ClassLoader loader = Thread.currentThread().getContextClassLoader();
if (loader != null) {
for (final Map.Entry<BeanContext, Object> o : potentials.entrySet()) {
if (loader.equals(o.getKey().getClassLoader())) {
return unwrap(o.getValue());
}
}
}
// No classloader match: fall back to the default ref.
return unwrap(defaultValue);
}
// Resolves nested Reference values to the object they point to.
private Object unwrap(final Object value) throws NamingException {
if (org.apache.openejb.core.ivm.naming.Reference.class.isInstance(value)) { // pretty sure
return org.apache.openejb.core.ivm.naming.Reference.class.cast(value).getObject();
}
return value;
}
}
}
|
googleapis/google-cloud-java | 35,853 | java-shell/proto-google-cloud-shell-v1/src/main/java/com/google/cloud/shell/v1/StartEnvironmentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/shell/v1/cloudshell.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.shell.v1;
/**
*
*
* <pre>
* Request message for
* [StartEnvironment][google.cloud.shell.v1.CloudShellService.StartEnvironment].
* </pre>
*
* Protobuf type {@code google.cloud.shell.v1.StartEnvironmentRequest}
*/
public final class StartEnvironmentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.shell.v1.StartEnvironmentRequest)
StartEnvironmentRequestOrBuilder {
// Generated by protoc — hand edits will be lost on regeneration.
private static final long serialVersionUID = 0L;
// Use StartEnvironmentRequest.newBuilder() to construct.
private StartEnvironmentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializes proto3 string/repeated fields to their
// empty defaults.
private StartEnvironmentRequest() {
name_ = "";
accessToken_ = "";
publicKeys_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new StartEnvironmentRequest();
}
// Message descriptor, resolved from the generated CloudShellProto holder.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.shell.v1.CloudShellProto
.internal_static_google_cloud_shell_v1_StartEnvironmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.shell.v1.CloudShellProto
.internal_static_google_cloud_shell_v1_StartEnvironmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.shell.v1.StartEnvironmentRequest.class,
com.google.cloud.shell.v1.StartEnvironmentRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Name of the resource that should be started, for example
* `users/me/environments/default` or
* `users/someone@example.com/environments/default`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the resource that should be started, for example
* `users/me/environments/default` or
* `users/someone@example.com/environments/default`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ACCESS_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object accessToken_ = "";
/**
*
*
* <pre>
* The initial access token passed to the environment. If this is present and
* valid, the environment will be pre-authenticated with gcloud so that the
* user can run gcloud commands in Cloud Shell without having to log in. This
* code can be updated later by calling AuthorizeEnvironment.
* </pre>
*
* <code>string access_token = 2;</code>
*
* @return The accessToken.
*/
@java.lang.Override
public java.lang.String getAccessToken() {
java.lang.Object ref = accessToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
accessToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The initial access token passed to the environment. If this is present and
* valid, the environment will be pre-authenticated with gcloud so that the
* user can run gcloud commands in Cloud Shell without having to log in. This
* code can be updated later by calling AuthorizeEnvironment.
* </pre>
*
* <code>string access_token = 2;</code>
*
* @return The bytes for accessToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getAccessTokenBytes() {
java.lang.Object ref = accessToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
accessToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PUBLIC_KEYS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList publicKeys_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @return A list containing the publicKeys.
*/
public com.google.protobuf.ProtocolStringList getPublicKeysList() {
return publicKeys_;
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @return The count of publicKeys.
*/
public int getPublicKeysCount() {
return publicKeys_.size();
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param index The index of the element to return.
* @return The publicKeys at the given index.
*/
public java.lang.String getPublicKeys(int index) {
return publicKeys_.get(index);
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param index The index of the value to return.
* @return The bytes of the publicKeys at the given index.
*/
public com.google.protobuf.ByteString getPublicKeysBytes(int index) {
return publicKeys_.getByteString(index);
}
// Cached initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message has no required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Serializes set fields in field-number order; proto3 empty strings are
// defaults and are not written to the wire.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(accessToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, accessToken_);
}
// Repeated field: every element is written, including empty strings.
for (int i = 0; i < publicKeys_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, publicKeys_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Memoized: -1 means "not computed yet".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(accessToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, accessToken_);
}
{
int dataSize = 0;
for (int i = 0; i < publicKeys_.size(); i++) {
dataSize += computeStringSizeNoTag(publicKeys_.getRaw(i));
}
size += dataSize;
// One byte of tag overhead per repeated element (field 3 fits one byte).
size += 1 * getPublicKeysList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.shell.v1.StartEnvironmentRequest)) {
return super.equals(obj);
}
com.google.cloud.shell.v1.StartEnvironmentRequest other =
(com.google.cloud.shell.v1.StartEnvironmentRequest) obj;
// Field-by-field comparison, including unknown fields.
if (!getName().equals(other.getName())) return false;
if (!getAccessToken().equals(other.getAccessToken())) return false;
if (!getPublicKeysList().equals(other.getPublicKeysList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// Standard protoc hash: seeded by the descriptor, then each field mixed
// in together with its field number; empty repeated fields are skipped.
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + ACCESS_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getAccessToken().hashCode();
if (getPublicKeysCount() > 0) {
hash = (37 * hash) + PUBLIC_KEYS_FIELD_NUMBER;
hash = (53 * hash) + getPublicKeysList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.shell.v1.StartEnvironmentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [StartEnvironment][google.cloud.shell.v1.CloudShellService.StartEnvironment].
* </pre>
*
* Protobuf type {@code google.cloud.shell.v1.StartEnvironmentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.shell.v1.StartEnvironmentRequest)
com.google.cloud.shell.v1.StartEnvironmentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.shell.v1.CloudShellProto
.internal_static_google_cloud_shell_v1_StartEnvironmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.shell.v1.CloudShellProto
.internal_static_google_cloud_shell_v1_StartEnvironmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.shell.v1.StartEnvironmentRequest.class,
com.google.cloud.shell.v1.StartEnvironmentRequest.Builder.class);
}
// Construct using com.google.cloud.shell.v1.StartEnvironmentRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
accessToken_ = "";
publicKeys_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.shell.v1.CloudShellProto
.internal_static_google_cloud_shell_v1_StartEnvironmentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.shell.v1.StartEnvironmentRequest getDefaultInstanceForType() {
return com.google.cloud.shell.v1.StartEnvironmentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.shell.v1.StartEnvironmentRequest build() {
com.google.cloud.shell.v1.StartEnvironmentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.shell.v1.StartEnvironmentRequest buildPartial() {
com.google.cloud.shell.v1.StartEnvironmentRequest result =
new com.google.cloud.shell.v1.StartEnvironmentRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.shell.v1.StartEnvironmentRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.accessToken_ = accessToken_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
publicKeys_.makeImmutable();
result.publicKeys_ = publicKeys_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.shell.v1.StartEnvironmentRequest) {
return mergeFrom((com.google.cloud.shell.v1.StartEnvironmentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.shell.v1.StartEnvironmentRequest other) {
if (other == com.google.cloud.shell.v1.StartEnvironmentRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getAccessToken().isEmpty()) {
accessToken_ = other.accessToken_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.publicKeys_.isEmpty()) {
if (publicKeys_.isEmpty()) {
publicKeys_ = other.publicKeys_;
bitField0_ |= 0x00000004;
} else {
ensurePublicKeysIsMutable();
publicKeys_.addAll(other.publicKeys_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
// tag = (field_number << 3) | wire_type; all three fields here are
// length-delimited strings (wire type 2).
int tag = input.readTag();
switch (tag) {
case 0:
// End of stream.
done = true;
break;
case 10:
{
// Field 1: name.
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
// Field 2: access_token.
accessToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
// Field 3: repeated public_keys (one element per occurrence).
java.lang.String s = input.readStringRequireUtf8();
ensurePublicKeysIsMutable();
publicKeys_.add(s);
break;
} // case 26
default:
{
// Unknown fields are preserved; false signals an end-group tag.
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parents even on a parse failure so partial state is visible.
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Name of the resource that should be started, for example
* `users/me/environments/default` or
* `users/someone@example.com/environments/default`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the resource that should be started, for example
* `users/me/environments/default` or
* `users/someone@example.com/environments/default`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the resource that should be started, for example
* `users/me/environments/default` or
* `users/someone@example.com/environments/default`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the resource that should be started, for example
* `users/me/environments/default` or
* `users/someone@example.com/environments/default`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the resource that should be started, for example
* `users/me/environments/default` or
* `users/someone@example.com/environments/default`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object accessToken_ = "";
/**
*
*
* <pre>
* The initial access token passed to the environment. If this is present and
* valid, the environment will be pre-authenticated with gcloud so that the
* user can run gcloud commands in Cloud Shell without having to log in. This
* code can be updated later by calling AuthorizeEnvironment.
* </pre>
*
* <code>string access_token = 2;</code>
*
* @return The accessToken.
*/
public java.lang.String getAccessToken() {
java.lang.Object ref = accessToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
accessToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The initial access token passed to the environment. If this is present and
* valid, the environment will be pre-authenticated with gcloud so that the
* user can run gcloud commands in Cloud Shell without having to log in. This
* code can be updated later by calling AuthorizeEnvironment.
* </pre>
*
* <code>string access_token = 2;</code>
*
* @return The bytes for accessToken.
*/
public com.google.protobuf.ByteString getAccessTokenBytes() {
java.lang.Object ref = accessToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
accessToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The initial access token passed to the environment. If this is present and
* valid, the environment will be pre-authenticated with gcloud so that the
* user can run gcloud commands in Cloud Shell without having to log in. This
* code can be updated later by calling AuthorizeEnvironment.
* </pre>
*
* <code>string access_token = 2;</code>
*
* @param value The accessToken to set.
* @return This builder for chaining.
*/
public Builder setAccessToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
accessToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The initial access token passed to the environment. If this is present and
* valid, the environment will be pre-authenticated with gcloud so that the
* user can run gcloud commands in Cloud Shell without having to log in. This
* code can be updated later by calling AuthorizeEnvironment.
* </pre>
*
* <code>string access_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearAccessToken() {
accessToken_ = getDefaultInstance().getAccessToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The initial access token passed to the environment. If this is present and
* valid, the environment will be pre-authenticated with gcloud so that the
* user can run gcloud commands in Cloud Shell without having to log in. This
* code can be updated later by calling AuthorizeEnvironment.
* </pre>
*
* <code>string access_token = 2;</code>
*
* @param value The bytes for accessToken to set.
* @return This builder for chaining.
*/
public Builder setAccessTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
accessToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList publicKeys_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensurePublicKeysIsMutable() {
if (!publicKeys_.isModifiable()) {
publicKeys_ = new com.google.protobuf.LazyStringArrayList(publicKeys_);
}
bitField0_ |= 0x00000004;
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @return A list containing the publicKeys.
*/
public com.google.protobuf.ProtocolStringList getPublicKeysList() {
publicKeys_.makeImmutable();
return publicKeys_;
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @return The count of publicKeys.
*/
public int getPublicKeysCount() {
return publicKeys_.size();
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param index The index of the element to return.
* @return The publicKeys at the given index.
*/
public java.lang.String getPublicKeys(int index) {
return publicKeys_.get(index);
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param index The index of the value to return.
* @return The bytes of the publicKeys at the given index.
*/
public com.google.protobuf.ByteString getPublicKeysBytes(int index) {
return publicKeys_.getByteString(index);
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param index The index to set the value at.
* @param value The publicKeys to set.
* @return This builder for chaining.
*/
public Builder setPublicKeys(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePublicKeysIsMutable();
publicKeys_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param value The publicKeys to add.
* @return This builder for chaining.
*/
public Builder addPublicKeys(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePublicKeysIsMutable();
publicKeys_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param values The publicKeys to add.
* @return This builder for chaining.
*/
public Builder addAllPublicKeys(java.lang.Iterable<java.lang.String> values) {
ensurePublicKeysIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, publicKeys_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPublicKeys() {
publicKeys_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* Public keys that should be added to the environment before it is started.
* </pre>
*
* <code>repeated string public_keys = 3;</code>
*
* @param value The bytes of the publicKeys to add.
* @return This builder for chaining.
*/
public Builder addPublicKeysBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensurePublicKeysIsMutable();
publicKeys_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.shell.v1.StartEnvironmentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.shell.v1.StartEnvironmentRequest)
private static final com.google.cloud.shell.v1.StartEnvironmentRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.shell.v1.StartEnvironmentRequest();
}
public static com.google.cloud.shell.v1.StartEnvironmentRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<StartEnvironmentRequest> PARSER =
new com.google.protobuf.AbstractParser<StartEnvironmentRequest>() {
@java.lang.Override
public StartEnvironmentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<StartEnvironmentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<StartEnvironmentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.shell.v1.StartEnvironmentRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,922 | java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/UpdatePolicyTagRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/policytagmanager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* Request message for
* [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.UpdatePolicyTagRequest}
*/
public final class UpdatePolicyTagRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.UpdatePolicyTagRequest)
UpdatePolicyTagRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdatePolicyTagRequest.newBuilder() to construct.
private UpdatePolicyTagRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdatePolicyTagRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdatePolicyTagRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_UpdatePolicyTagRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_UpdatePolicyTagRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest.class,
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest.Builder.class);
}
private int bitField0_;
public static final int POLICY_TAG_FIELD_NUMBER = 1;
private com.google.cloud.datacatalog.v1.PolicyTag policyTag_;
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*
* @return Whether the policyTag field is set.
*/
@java.lang.Override
public boolean hasPolicyTag() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*
* @return The policyTag.
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.PolicyTag getPolicyTag() {
return policyTag_ == null
? com.google.cloud.datacatalog.v1.PolicyTag.getDefaultInstance()
: policyTag_;
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.PolicyTagOrBuilder getPolicyTagOrBuilder() {
return policyTag_ == null
? com.google.cloud.datacatalog.v1.PolicyTag.getDefaultInstance()
: policyTag_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getPolicyTag());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPolicyTag());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest other =
(com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest) obj;
if (hasPolicyTag() != other.hasPolicyTag()) return false;
if (hasPolicyTag()) {
if (!getPolicyTag().equals(other.getPolicyTag())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPolicyTag()) {
hash = (37 * hash) + POLICY_TAG_FIELD_NUMBER;
hash = (53 * hash) + getPolicyTag().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.UpdatePolicyTagRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.UpdatePolicyTagRequest)
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_UpdatePolicyTagRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_UpdatePolicyTagRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest.class,
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getPolicyTagFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
policyTag_ = null;
if (policyTagBuilder_ != null) {
policyTagBuilder_.dispose();
policyTagBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_UpdatePolicyTagRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest getDefaultInstanceForType() {
return com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest build() {
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest buildPartial() {
com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest result =
new com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.policyTag_ = policyTagBuilder_ == null ? policyTag_ : policyTagBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest) {
return mergeFrom((com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest other) {
if (other == com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest.getDefaultInstance())
return this;
if (other.hasPolicyTag()) {
mergePolicyTag(other.getPolicyTag());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getPolicyTagFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.datacatalog.v1.PolicyTag policyTag_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.v1.PolicyTag,
com.google.cloud.datacatalog.v1.PolicyTag.Builder,
com.google.cloud.datacatalog.v1.PolicyTagOrBuilder>
policyTagBuilder_;
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*
* @return Whether the policyTag field is set.
*/
public boolean hasPolicyTag() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*
* @return The policyTag.
*/
public com.google.cloud.datacatalog.v1.PolicyTag getPolicyTag() {
if (policyTagBuilder_ == null) {
return policyTag_ == null
? com.google.cloud.datacatalog.v1.PolicyTag.getDefaultInstance()
: policyTag_;
} else {
return policyTagBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
public Builder setPolicyTag(com.google.cloud.datacatalog.v1.PolicyTag value) {
if (policyTagBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
policyTag_ = value;
} else {
policyTagBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
public Builder setPolicyTag(com.google.cloud.datacatalog.v1.PolicyTag.Builder builderForValue) {
if (policyTagBuilder_ == null) {
policyTag_ = builderForValue.build();
} else {
policyTagBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
public Builder mergePolicyTag(com.google.cloud.datacatalog.v1.PolicyTag value) {
if (policyTagBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& policyTag_ != null
&& policyTag_ != com.google.cloud.datacatalog.v1.PolicyTag.getDefaultInstance()) {
getPolicyTagBuilder().mergeFrom(value);
} else {
policyTag_ = value;
}
} else {
policyTagBuilder_.mergeFrom(value);
}
if (policyTag_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
public Builder clearPolicyTag() {
bitField0_ = (bitField0_ & ~0x00000001);
policyTag_ = null;
if (policyTagBuilder_ != null) {
policyTagBuilder_.dispose();
policyTagBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
public com.google.cloud.datacatalog.v1.PolicyTag.Builder getPolicyTagBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getPolicyTagFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
public com.google.cloud.datacatalog.v1.PolicyTagOrBuilder getPolicyTagOrBuilder() {
if (policyTagBuilder_ != null) {
return policyTagBuilder_.getMessageOrBuilder();
} else {
return policyTag_ == null
? com.google.cloud.datacatalog.v1.PolicyTag.getDefaultInstance()
: policyTag_;
}
}
/**
*
*
* <pre>
* The policy tag to update. You can update only its description, display
* name, and parent policy tag fields.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.PolicyTag policy_tag = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.v1.PolicyTag,
com.google.cloud.datacatalog.v1.PolicyTag.Builder,
com.google.cloud.datacatalog.v1.PolicyTagOrBuilder>
getPolicyTagFieldBuilder() {
if (policyTagBuilder_ == null) {
policyTagBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.v1.PolicyTag,
com.google.cloud.datacatalog.v1.PolicyTag.Builder,
com.google.cloud.datacatalog.v1.PolicyTagOrBuilder>(
getPolicyTag(), getParentForChildren(), isClean());
policyTag_ = null;
}
return policyTagBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Specifies the fields to update.
*
* You can update only display name, description, and parent policy tag.
* If not set, defaults to all updatable fields.
* For more information, see [FieldMask]
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.UpdatePolicyTagRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.UpdatePolicyTagRequest)
// Shared immutable default instance; getDefaultInstance() and
// getDefaultInstanceForType() both return this singleton.
private static final com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest();
}

public static com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that delegates to Builder.mergeFrom(); on any failure the partially
// built message is attached to the thrown InvalidProtocolBufferException so
// callers can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<UpdatePolicyTagRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdatePolicyTagRequest>() {
      @java.lang.Override
      public UpdatePolicyTagRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdatePolicyTagRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdatePolicyTagRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdatePolicyTagRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/ignite-3 | 35,223 | modules/jdbc/src/integrationTest/java/org/apache/ignite/internal/jdbc/ItJdbcMetadataSelfTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.jdbc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;
import java.util.UUID;
import org.apache.ignite.internal.client.proto.ProtocolVersion;
import org.apache.ignite.internal.jdbc.proto.event.JdbcColumnMeta;
import org.apache.ignite.internal.sql.engine.util.SqlTestUtils;
import org.apache.ignite.internal.type.NativeType;
import org.apache.ignite.jdbc.AbstractJdbcSelfTest;
import org.apache.ignite.jdbc.util.JdbcTestUtils;
import org.apache.ignite.sql.ColumnType;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
* Metadata tests.
*/
public class ItJdbcMetadataSelfTest extends AbstractJdbcSelfTest {
/**
 * Creates the schemas, tables and seed rows used by the metadata tests.
 *
 * <p>Creates several schemas (including the case-sensitive, quoted {@code "user0"}),
 * tables referenced by the lookups below, and two rows joined via
 * {@code PERSON.ORGID = ORGANIZATION.ID}. The statement order matters: tests assert
 * the lexicographic ordering of the resulting schema/table names.
 */
@BeforeAll
public static void createTables() throws SQLException {
    try (Statement stmt = conn.createStatement()) {
        stmt.execute("CREATE SCHEMA IF NOT EXISTS PUBLIC;"
                + "CREATE SCHEMA IF NOT EXISTS META;"
                + "CREATE SCHEMA IF NOT EXISTS USER2;"
                + "CREATE SCHEMA IF NOT EXISTS \"user0\";"
                + "CREATE SCHEMA IF NOT EXISTS USER1;"
                + "CREATE TABLE person(name VARCHAR(32), age INT, orgid INT PRIMARY KEY);"
                + "CREATE TABLE organization(id INT PRIMARY KEY, name VARCHAR, bigdata DECIMAL(20, 10));"
                + "CREATE TABLE user1.table1(id INT PRIMARY KEY);"
                + "CREATE TABLE user2.\"table2\"(id INT PRIMARY KEY);"
                + "CREATE TABLE \"user0\".\"table0\"(\"id\" INT PRIMARY KEY);"
                + "CREATE TABLE \"user0\".table0(id INT PRIMARY KEY);"
                + "INSERT INTO person (orgid, name, age) VALUES (1, '111', 111);"
                + "INSERT INTO organization (id, name, bigdata) VALUES (1, 'AAA', 10);"
        );
    }
}
/** Verifies metadata reported for columns whose values are SQL NULL literals. */
@Test
public void testNullValuesMetaData() throws Exception {
    ResultSet rs = stmt.executeQuery(
            "select NULL, substring(null, 1, 2)");

    assertNotNull(rs);

    ResultSetMetaData rsMeta = rs.getMetaData();

    assertNotNull(rsMeta);
    assertEquals(2, rsMeta.getColumnCount());

    // Both columns must be reported as SQL NULL mapped to java.lang.Void.
    for (int col = 1; col <= 2; col++) {
        assertEquals(Types.NULL, rsMeta.getColumnType(col));
        assertEquals("NULL", rsMeta.getColumnTypeName(col));
        assertEquals("java.lang.Void", rsMeta.getColumnClassName(col));
    }
}
/** Verifies table name, column name/label and type metadata for a two-table join. */
@Test
public void testResultSetMetaData() throws Exception {
    ResultSet rs = stmt.executeQuery(
            "select p.name, o.id as orgId, p.age from PERSON p, ORGANIZATION o where p.orgId = o.id");

    assertNotNull(rs);

    ResultSetMetaData rsMeta = rs.getMetaData();

    assertNotNull(rsMeta);
    assertEquals(3, rsMeta.getColumnCount());

    // Column 1: PERSON.NAME selected without an alias, so the label equals the name.
    assertEquals("PERSON", rsMeta.getTableName(1).toUpperCase());
    assertEquals("NAME", rsMeta.getColumnName(1).toUpperCase());
    assertEquals("NAME", rsMeta.getColumnLabel(1).toUpperCase());
    assertEquals(Types.VARCHAR, rsMeta.getColumnType(1));
    assertEquals("VARCHAR", rsMeta.getColumnTypeName(1));
    assertEquals("java.lang.String", rsMeta.getColumnClassName(1));

    // Column 2: ORGANIZATION.ID selected under the ORGID alias.
    assertEquals("ORGANIZATION", rsMeta.getTableName(2).toUpperCase());
    assertEquals("ID", rsMeta.getColumnName(2).toUpperCase());
    assertEquals("ORGID", rsMeta.getColumnLabel(2).toUpperCase());
    assertEquals(Types.INTEGER, rsMeta.getColumnType(2));
    assertEquals("INTEGER", rsMeta.getColumnTypeName(2));
    assertEquals("java.lang.Integer", rsMeta.getColumnClassName(2));
}
/**
 * Verifies that column metadata reported via {@code DatabaseMetaData.getColumns()}
 * agrees with the per-type expectations in {@link #checkMeta(ResultSetMetaData)}.
 */
@Test
public void testDatabaseMetaDataColumns() throws Exception {
    createMetaTable();

    try {
        DatabaseMetaData dbMeta = conn.getMetaData();

        List<JdbcColumnMeta> columnsMeta = new ArrayList<>();

        // Rebuild column metadata from the getColumns() rows so it can be validated
        // with the same checkMeta() used for plain ResultSetMetaData.
        try (ResultSet rs = dbMeta.getColumns(null, "META", "TEST", null)) {
            while (rs.next()) {
                JdbcColumnMeta meta = new JdbcColumnMeta(
                        rs.getString("COLUMN_NAME"),
                        rs.getString("TABLE_SCHEM"),
                        rs.getString("TABLE_NAME"),
                        rs.getString("COLUMN_NAME"),
                        dataTypeToColumnType(rs.getInt("DATA_TYPE"), rs.getString("TYPE_NAME")),
                        rs.getShort("COLUMN_SIZE"),
                        rs.getShort("DECIMAL_DIGITS"),
                        "YES".equals(rs.getString("IS_NULLABLE"))
                );

                columnsMeta.add(meta);
            }
        }

        ResultSetMetaData rsMeta = new JdbcResultSetMetadata(columnsMeta);

        checkMeta(rsMeta);
    } finally {
        // Drop the per-test table so other tests see a clean META schema.
        stmt.execute("DROP TABLE META.TEST;");
    }
}
/**
 * Maps a JDBC {@link Types} code (plus the driver-reported type name for
 * {@link Types#OTHER}) to the corresponding Ignite {@link ColumnType}.
 *
 * @param dataType JDBC type code from the {@code DATA_TYPE} column.
 * @param typeName Driver-specific type name from {@code TYPE_NAME}; consulted only for
 *         {@link Types#OTHER} and may be {@code null} per the JDBC contract.
 * @return Matching column type; the test fails via assertion if the type is unsupported.
 */
private ColumnType dataTypeToColumnType(int dataType, String typeName) {
    ColumnType type = null;

    switch (dataType) {
        case Types.BOOLEAN:
            type = ColumnType.BOOLEAN;
            break;
        case Types.TINYINT:
            type = ColumnType.INT8;
            break;
        case Types.SMALLINT:
            type = ColumnType.INT16;
            break;
        case Types.INTEGER:
            type = ColumnType.INT32;
            break;
        case Types.BIGINT:
            type = ColumnType.INT64;
            break;
        case Types.REAL:
            type = ColumnType.FLOAT;
            break;
        case Types.DOUBLE:
            type = ColumnType.DOUBLE;
            break;
        case Types.DECIMAL:
            type = ColumnType.DECIMAL;
            break;
        case Types.DATE:
            type = ColumnType.DATE;
            break;
        case Types.TIME:
            type = ColumnType.TIME;
            break;
        case Types.TIMESTAMP:
            type = ColumnType.DATETIME;
            break;
        case Types.OTHER:
            // UUID and TIMESTAMP WITH LOCAL TIME ZONE have no dedicated JDBC code, so
            // the driver reports them as OTHER and we disambiguate by type name.
            // Constant-first equals() avoids an NPE if the driver returns a null name.
            if ("UUID".equals(typeName)) {
                type = ColumnType.UUID;
            } else if ("TIMESTAMP WITH LOCAL TIME ZONE".equals(typeName)) {
                type = ColumnType.TIMESTAMP;
            }
            break;
        case Types.VARCHAR:
            type = ColumnType.STRING;
            break;
        case Types.VARBINARY:
            type = ColumnType.BYTE_ARRAY;
            break;
        default:
            break;
    }

    assertNotNull(type, "Not supported type " + dataType + " " + typeName);

    return type;
}
/** Verifies {@code ResultSetMetaData} for a SELECT * over the all-types META.TEST table. */
@Test
public void testResultSetMetaDataColumns() throws Exception {
    createMetaTable();

    try {
        ResultSet rs = stmt.executeQuery("SELECT * FROM META.TEST t");

        assertNotNull(rs);

        checkMeta(rs.getMetaData());
    } finally {
        // Always drop the table so other tests see a clean META schema.
        stmt.execute("DROP TABLE META.TEST;");
    }
}
/**
 * Verifies the metadata of the META.TEST table: one column per supported native type,
 * each checked by name, JDBC type code, SQL type name and Java value class.
 *
 * @param meta Metadata to check.
 * @throws SQLException If metadata access fails.
 */
private void checkMeta(ResultSetMetaData meta) throws SQLException {
    assertNotNull(meta);

    // 15 typed columns plus the trailing INT primary key column.
    assertEquals(16, meta.getColumnCount());

    assertEquals("META", meta.getSchemaName(1));
    assertEquals("TEST", meta.getTableName(1).toUpperCase());

    int i = 1;

    checkMeta(meta, i++, "BOOLEAN_COL", Types.BOOLEAN, "BOOLEAN", Boolean.class);
    checkMeta(meta, i++, "TINYINT_COL", Types.TINYINT, "TINYINT", Byte.class);
    checkMeta(meta, i++, "SMALLINT_COL", Types.SMALLINT, "SMALLINT", Short.class);
    checkMeta(meta, i++, "INTEGER_COL", Types.INTEGER, "INTEGER", Integer.class);
    checkMeta(meta, i++, "BIGINT_COL", Types.BIGINT, "BIGINT", Long.class);
    checkMeta(meta, i++, "REAL_COL", Types.REAL, "REAL", Float.class);
    checkMeta(meta, i++, "DOUBLE_COL", Types.DOUBLE, "DOUBLE", Double.class);
    checkMeta(meta, i++, "DECIMAL_COL", Types.DECIMAL, "DECIMAL", BigDecimal.class);
    checkMeta(meta, i++, "DATE_COL", Types.DATE, "DATE", java.sql.Date.class);
    checkMeta(meta, i++, "TIME_COL", Types.TIME, "TIME", java.sql.Time.class);
    checkMeta(meta, i++, "TIMESTAMP_COL", Types.TIMESTAMP, "TIMESTAMP", java.sql.Timestamp.class);
    // Types without a dedicated JDBC code are reported as OTHER (see dataTypeToColumnType()).
    checkMeta(meta, i++, "TIMESTAMP_WITH_LOCAL_TIME_ZONE_COL", Types.OTHER, "TIMESTAMP WITH LOCAL TIME ZONE", java.sql.Timestamp.class);
    checkMeta(meta, i++, "UUID_COL", Types.OTHER, "UUID", UUID.class);
    checkMeta(meta, i++, "VARCHAR_COL", Types.VARCHAR, "VARCHAR", String.class);
    checkMeta(meta, i++, "VARBINARY_COL", Types.VARBINARY, "VARBINARY", byte[].class);

    // After the calls above i == 16: only the ID key column remains unchecked by type.
    assertEquals(i, meta.getColumnCount(), "There are not checked columns");
}
/**
 * Verifies name, label, JDBC type code, SQL type name and value class of a single column.
 *
 * @param meta Result set metadata.
 * @param idx 1-based column index.
 * @param columnName Expected upper-case column name (also the expected label).
 * @param expType Expected JDBC type code from {@link Types}.
 * @param expTypeName Expected SQL type name.
 * @param expClass Expected Java value class.
 * @throws SQLException If metadata access fails.
 */
private void checkMeta(ResultSetMetaData meta, int idx, String columnName, int expType, String expTypeName, Class<?> expClass)
        throws SQLException {
    // Class<?> instead of the raw Class type: callers pass class literals, so this is
    // source-compatible and removes the raw-type warning.
    assertEquals(columnName, meta.getColumnName(idx).toUpperCase());
    assertEquals(columnName, meta.getColumnLabel(idx).toUpperCase());
    assertEquals(expType, meta.getColumnType(idx));
    assertEquals(expTypeName, meta.getColumnTypeName(idx));
    assertEquals(expClass.getName(), meta.getColumnClassName(idx));
}
/**
 * Creates META.TEST with one column per supported native type plus an INT primary key.
 * Column names are derived from the SQL type name with spaces replaced by underscores.
 */
private void createMetaTable() {
    try {
        StringJoiner colDefs = new StringJoiner(",");

        for (NativeType nativeType : NativeType.nativeTypes()) {
            String sqlType = SqlTestUtils.toSqlType(nativeType);

            colDefs.add(sqlType.replace(' ', '_') + "_COL " + sqlType);
        }

        colDefs.add("id INT PRIMARY KEY");

        stmt.executeUpdate("CREATE TABLE meta.test(" + colDefs + ")");
    } catch (SQLException ex) {
        throw new RuntimeException(ex);
    }
}
/**
 * Verifies {@code DatabaseMetaData.getTables()}: filtering by schema/table patterns,
 * by table type, case-sensitive (quoted) identifiers, and system views.
 * Rows are expected in lexicographic (schema, table) order.
 */
@Test
public void testGetTables() throws Exception {
    DatabaseMetaData meta = conn.getMetaData();

    // PUBLIC tables.
    {
        try (ResultSet rs = meta.getTables("IGNITE", "PUBLIC", "%", new String[]{"TABLE"})) {
            assertNotNull(rs);
            assertTrue(rs.next());
            assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
            assertEquals("TABLE", rs.getString("TABLE_TYPE"));
            assertEquals("ORGANIZATION", rs.getString("TABLE_NAME"));
            assertTrue(rs.next());
            assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
            assertEquals("TABLE", rs.getString("TABLE_TYPE"));
            assertEquals("PERSON", rs.getString("TABLE_NAME"));
        }

        // A null type filter must behave like "all types".
        try (ResultSet rs = meta.getTables("IGNITE", "PUBLIC", "%", null)) {
            assertNotNull(rs);
            assertTrue(rs.next());
            assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
            assertEquals("TABLE", rs.getString("TABLE_TYPE"));
            assertEquals("ORGANIZATION", rs.getString("TABLE_NAME"));
            assertTrue(rs.next());
            assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
            assertEquals("TABLE", rs.getString("TABLE_TYPE"));
            assertEquals("PERSON", rs.getString("TABLE_NAME"));
        }

        // A table is not a VIEW, and unknown type names match nothing.
        try (ResultSet rs = meta.getTables("IGNITE", "PUBLIC", "ORGANIZATION", new String[]{"VIEW"})) {
            assertFalse(rs.next());
        }

        try (ResultSet rs = meta.getTables("IGNITE", "PUBLIC", "", new String[]{"WRONG"})) {
            assertFalse(rs.next());
        }
    }

    // All tables.
    try (ResultSet rs = meta.getTables("IGNITE", "%", "%", new String[]{"TABLE"})) {
        assertNotNull(rs);
        assertTrue(rs.next());
        assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("ORGANIZATION", rs.getString("TABLE_NAME"));
        assertTrue(rs.next());
        assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("PERSON", rs.getString("TABLE_NAME"));
        assertTrue(rs.next());
        assertEquals("USER1", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("TABLE1", rs.getString("TABLE_NAME"));
        assertTrue(rs.next());
        assertEquals("USER2", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("table2", rs.getString("TABLE_NAME"));
        // Quoted lower-case "user0" sorts after the upper-cased schema names.
        assertTrue(rs.next());
        assertEquals("user0", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("TABLE0", rs.getString("TABLE_NAME"));
        assertTrue(rs.next());
        assertEquals("user0", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("table0", rs.getString("TABLE_NAME"));
        assertFalse(rs.next());
    }

    // Case sensitive table name.
    try (ResultSet rs = meta.getTables("IGNITE", "USER2", "table%", null)) {
        assertTrue(rs.next());
        assertEquals("USER2", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("table2", rs.getString("TABLE_NAME"));
        assertFalse(rs.next());
    }

    // Case sensitive schema name: the lower-case pattern only matches quoted "user0".
    try (ResultSet rs = meta.getTables("IGNITE", "user%", "%", null)) {
        assertTrue(rs.next());
        assertEquals("user0", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("TABLE0", rs.getString("TABLE_NAME"));
        assertTrue(rs.next());
        assertEquals("user0", rs.getString("TABLE_SCHEM"));
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertEquals("table0", rs.getString("TABLE_NAME"));
        assertFalse(rs.next());
    }

    // System views.
    {
        try (ResultSet rs = meta.getTables("IGNITE", "%", "TABLES", new String[]{"VIEW"})) {
            assertTrue(rs.next());
            assertEquals("SYSTEM", rs.getString("TABLE_SCHEM"));
            assertEquals("VIEW", rs.getString("TABLE_TYPE"));
            assertEquals("TABLES", rs.getString("TABLE_NAME"));
        }

        try (ResultSet rs = meta.getTables("IGNITE", "SYSTEM", "TABLES", new String[]{"VIEW"})) {
            assertTrue(rs.next());
            assertEquals("SYSTEM", rs.getString("TABLE_SCHEM"));
            assertEquals("VIEW", rs.getString("TABLE_TYPE"));
            assertEquals("TABLES", rs.getString("TABLE_NAME"));
        }

        // System views must not be reported under the TABLE type.
        try (ResultSet rs = meta.getTables("IGNITE", "%", "TABLES", new String[]{"TABLE"})) {
            assertFalse(rs.next());
        }
    }
}
/**
 * Verifies {@code DatabaseMetaData.getColumns()} for user tables, case-sensitive
 * (quoted) identifiers, and a system view.
 *
 * <p>NOTE(review): the intermediate result sets here are reassigned without being
 * closed — presumably relying on statement cleanup; consider try-with-resources.
 */
@Test
public void testGetColumns() throws Exception {
    DatabaseMetaData meta = conn.getMetaData();

    // Tables.
    {
        ResultSet rs = meta.getColumns("IGNITE", "PUBLIC", "%", "%");

        // Columns arrive grouped per table, in table order: ORGANIZATION then PERSON.
        checkOrgTableColumns(rs);
        checkPersonTableColumns(rs);

        assertFalse(rs.next());

        rs = meta.getColumns("IGNITE", "PUBLIC", "PERSON", "%");

        checkPersonTableColumns(rs);

        assertFalse(rs.next());

        // A null catalog/pattern must behave like a wildcard.
        rs = meta.getColumns(null, "PUBLIC", "PERSON", null);

        checkPersonTableColumns(rs);

        assertFalse(rs.next());

        rs = meta.getColumns("IGNITE", "PUBLIC", "ORGANIZATION", "%");

        checkOrgTableColumns(rs);

        assertFalse(rs.next());

        rs = meta.getColumns(null, "PUBLIC", "ORGANIZATION", null);

        checkOrgTableColumns(rs);

        assertFalse(rs.next());

        rs = meta.getColumns(null, "USER%", "%", null);

        checkUser1Columns(rs);
        checkUser2Columns(rs);

        assertFalse(rs.next());

        // Case sensitive column name.
        {
            // The lower-case "id" only exists in quoted "user0"."table0".
            rs = meta.getColumns(null, "user%", "%", "id");

            assertTrue(rs.next());
            assertEquals("user0", rs.getString("TABLE_SCHEM"));
            assertEquals("table0", rs.getString("TABLE_NAME"));
            assertEquals("id", rs.getString("COLUMN_NAME"));
            assertEquals(Types.INTEGER, rs.getInt("DATA_TYPE"));
            assertEquals("INTEGER", rs.getString("TYPE_NAME"));
            assertEquals(0, rs.getInt("NULLABLE"));

            assertFalse(rs.next());
        }
    }

    // System view.
    {
        ResultSet rs = meta.getColumns("IGNITE", "SYSTEM", "TRANSACTIONS", "TRANSACTION_%");

        checkTxViewColumns(rs);
    }
}
/**
 * Checks organization table column names and types.
 *
 * @param rs ResultSet positioned before the first ORGANIZATION column row.
 * @throws SQLException If column access fails.
 * */
private static void checkOrgTableColumns(ResultSet rs) throws SQLException {
    assertNotNull(rs);

    // ID: the non-nullable INTEGER primary key.
    assertTrue(rs.next());
    assertEquals("ID", rs.getString("COLUMN_NAME"));
    assertEquals(Types.INTEGER, rs.getInt("DATA_TYPE"));
    assertEquals("INTEGER", rs.getString("TYPE_NAME"));
    assertEquals(0, rs.getInt("NULLABLE"));

    // NAME: nullable VARCHAR with no declared length.
    assertTrue(rs.next());
    assertEquals("NAME", rs.getString("COLUMN_NAME"));
    assertEquals(Types.VARCHAR, rs.getInt("DATA_TYPE"));
    assertEquals("VARCHAR", rs.getString("TYPE_NAME"));
    assertEquals(1, rs.getInt("NULLABLE"));

    // BIGDATA: DECIMAL(20, 10) — COLUMN_SIZE is the precision, DECIMAL_DIGITS the scale.
    assertTrue(rs.next());
    assertEquals("BIGDATA", rs.getString("COLUMN_NAME"));
    assertEquals(Types.DECIMAL, rs.getInt("DATA_TYPE"));
    assertEquals("DECIMAL", rs.getString("TYPE_NAME"));
    assertEquals(1, rs.getInt("NULLABLE"));
    assertEquals(10, rs.getInt("DECIMAL_DIGITS"));
    assertEquals(20, rs.getInt("COLUMN_SIZE"));
}
/**
 * Checks person table column names and types.
 *
 * @param rs ResultSet positioned before the first PERSON column row.
 * @throws SQLException If column access fails.
 * */
private static void checkPersonTableColumns(ResultSet rs) throws SQLException {
    assertNotNull(rs);

    // NAME: nullable VARCHAR(32).
    assertTrue(rs.next());
    assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
    assertEquals("PERSON", rs.getString("TABLE_NAME"));
    assertEquals("NAME", rs.getString("COLUMN_NAME"));
    assertEquals(Types.VARCHAR, rs.getInt("DATA_TYPE"));
    assertEquals("VARCHAR", rs.getString("TYPE_NAME"));
    assertEquals(1, rs.getInt("NULLABLE"));
    assertEquals(32, rs.getInt("COLUMN_SIZE"));

    // AGE: nullable INTEGER; COLUMN_SIZE 10 is the decimal precision of a 32-bit int.
    assertTrue(rs.next());
    assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
    assertEquals("PERSON", rs.getString("TABLE_NAME"));
    assertEquals("AGE", rs.getString("COLUMN_NAME"));
    assertEquals(Types.INTEGER, rs.getInt("DATA_TYPE"));
    assertEquals("INTEGER", rs.getString("TYPE_NAME"));
    assertEquals(1, rs.getInt("NULLABLE"));
    assertEquals(10, rs.getInt("COLUMN_SIZE"));

    // ORGID: the non-nullable INTEGER primary key.
    assertTrue(rs.next());
    assertEquals("PUBLIC", rs.getString("TABLE_SCHEM"));
    assertEquals("PERSON", rs.getString("TABLE_NAME"));
    assertEquals("ORGID", rs.getString("COLUMN_NAME"));
    assertEquals(Types.INTEGER, rs.getInt("DATA_TYPE"));
    assertEquals("INTEGER", rs.getString("TYPE_NAME"));
    assertEquals(0, rs.getInt("NULLABLE"));
    assertEquals(10, rs.getInt("COLUMN_SIZE"));
}
/** Checks the single ID column of table USER1.TABLE1. */
private static void checkUser1Columns(ResultSet rs) throws SQLException {
    checkIntIdColumn(rs, "USER1", "TABLE1");
}

/** Checks the single ID column of table USER2."table2". */
private static void checkUser2Columns(ResultSet rs) throws SQLException {
    checkIntIdColumn(rs, "USER2", "table2");
}

/**
 * Advances {@code rs} one row and verifies it describes a non-nullable INTEGER column
 * named ID belonging to the given schema and table. Shared by the per-user checks above,
 * which previously duplicated these assertions verbatim.
 *
 * @param rs ResultSet positioned before the expected row.
 * @param schema Expected schema name.
 * @param table Expected table name.
 * @throws SQLException If column access fails.
 */
private static void checkIntIdColumn(ResultSet rs, String schema, String table) throws SQLException {
    assertTrue(rs.next());
    assertEquals(schema, rs.getString("TABLE_SCHEM"));
    assertEquals(table, rs.getString("TABLE_NAME"));
    assertEquals("ID", rs.getString("COLUMN_NAME"));
    assertEquals(Types.INTEGER, rs.getInt("DATA_TYPE"));
    assertEquals("INTEGER", rs.getString("TYPE_NAME"));
    assertEquals(0, rs.getInt("NULLABLE"));
}
/**
 * Checks the columns of the SYSTEM.TRANSACTIONS view returned for the
 * {@code TRANSACTION_%} pattern, in declaration order.
 *
 * @param rs ResultSet positioned before the first column row.
 * @throws SQLException If column access fails.
 */
private static void checkTxViewColumns(ResultSet rs) throws SQLException {
    checkTxVarcharColumn(rs, "TRANSACTION_STATE");
    checkTxVarcharColumn(rs, "TRANSACTION_ID");

    // TRANSACTION_START_TIME is the only non-VARCHAR column: a timestamp with local
    // time zone, reported as Types.OTHER with COLUMN_SIZE equal to its precision.
    assertTrue(rs.next());
    assertEquals("SYSTEM", rs.getString("TABLE_SCHEM"));
    assertEquals("TRANSACTIONS", rs.getString("TABLE_NAME"));
    assertEquals("TRANSACTION_START_TIME", rs.getString("COLUMN_NAME"));
    assertEquals(Types.OTHER, rs.getInt("DATA_TYPE"));
    assertEquals("TIMESTAMP WITH LOCAL TIME ZONE", rs.getString("TYPE_NAME"));
    assertEquals(1, rs.getInt("NULLABLE"));
    assertEquals(9, rs.getInt("COLUMN_SIZE"));

    checkTxVarcharColumn(rs, "TRANSACTION_TYPE");
    checkTxVarcharColumn(rs, "TRANSACTION_PRIORITY");

    assertFalse(rs.next());
}

/**
 * Advances {@code rs} one row and verifies a nullable VARCHAR(64) column of the
 * TRANSACTIONS view. Extracted from four verbatim copies of the same assertions.
 *
 * @param rs ResultSet positioned before the expected row.
 * @param columnName Expected column name.
 * @throws SQLException If column access fails.
 */
private static void checkTxVarcharColumn(ResultSet rs, String columnName) throws SQLException {
    assertTrue(rs.next());
    assertEquals("SYSTEM", rs.getString("TABLE_SCHEM"));
    assertEquals("TRANSACTIONS", rs.getString("TABLE_NAME"));
    assertEquals(columnName, rs.getString("COLUMN_NAME"));
    assertEquals(Types.VARCHAR, rs.getInt("DATA_TYPE"));
    assertEquals("VARCHAR", rs.getString("TYPE_NAME"));
    assertEquals(1, rs.getInt("NULLABLE"));
    assertEquals(64, rs.getInt("COLUMN_SIZE"));
}
/**
 * Check JDBC support flags.
 */
@Test
public void testCheckSupports() throws SQLException {
    DatabaseMetaData dbMeta = conn.getMetaData();

    // ANSI SQL-92 entry level, ALTER TABLE ADD/DROP COLUMN, and standard
    // NULL-propagation in concatenation must all be reported as supported.
    assertTrue(dbMeta.supportsANSI92EntryLevelSQL());
    assertTrue(dbMeta.supportsAlterTableWithAddColumn());
    assertTrue(dbMeta.supportsAlterTableWithDropColumn());
    assertTrue(dbMeta.nullPlusNonNullIsNull());
}
/** Verifies that the reported database product and driver versions match the protocol version. */
@Test
public void testVersions() throws Exception {
    // JUnit 5 assertEquals takes (expected, actual, message); the original call had the
    // arguments swapped, which produces a misleading failure message.
    assertEquals(ProtocolVersion.LATEST_VER.toString(), conn.getMetaData().getDatabaseProductVersion(),
            "Unexpected ignite database product version.");
    assertEquals(ProtocolVersion.LATEST_VER.toString(), conn.getMetaData().getDriverVersion(),
            "Unexpected ignite driver version.");
}
/** Verifies {@code DatabaseMetaData.getSchemas()} with and without a schema pattern. */
@Test
public void testSchemasMetadata() throws Exception {
    try (ResultSet rs = conn.getMetaData().getSchemas()) {
        // Quoted lower-case "user0" sorts after the unquoted (upper-cased) schema names.
        assertEquals(List.of("META", "PUBLIC", "SYSTEM", "USER1", "USER2", "user0"), firstColumn(rs));
    }

    try (ResultSet rs = conn.getMetaData().getSchemas("IGNITE", "USER%")) {
        assertEquals(List.of("USER1", "USER2"), firstColumn(rs));
    }
}

/** Reads the first column of every remaining row of {@code rs} into a list. */
private static List<String> firstColumn(ResultSet rs) throws SQLException {
    List<String> values = new ArrayList<>();

    while (rs.next()) {
        values.add(rs.getString(1));
    }

    return values;
}
/** Verifies that a non-matching schema pattern yields an empty result set. */
@Test
public void testEmptySchemasMetadata() throws Exception {
    // try-with-resources: the original leaked the ResultSet.
    try (ResultSet rs = conn.getMetaData().getSchemas(null, "qqq")) {
        assertFalse(rs.next(), "Empty result set is expected");
    }
}
/** Verifies the primary key metadata of PUBLIC.PERSON: a single ORGID key column. */
@Test
public void testPrimaryKeyMetadata() throws Exception {
    // try-with-resources: the original leaked the ResultSet.
    try (ResultSet rs = conn.getMetaData().getPrimaryKeys(null, "PUBLIC", "PERSON")) {
        int cnt = 0;

        while (rs.next()) {
            assertEquals("ORGID", rs.getString("COLUMN_NAME"));

            cnt++;
        }

        // PERSON was created with a single-column primary key.
        assertEquals(1, cnt);
    }
}
/** Verifies primary key metadata across all schemas, including case-sensitive names. */
@Test
public void testGetAllPrimaryKeys() throws Exception {
    // Keys are expected in (schema, table) lexicographic order; quoted lower-case
    // identifiers ("table2", "user0", "id") keep their original case.
    List<String> expectedPks = Arrays.asList(
            "PUBLIC.ORGANIZATION.PK_ORGANIZATION.ID",
            "PUBLIC.PERSON.PK_PERSON.ORGID",
            "USER1.TABLE1.PK_TABLE1.ID",
            "USER2.table2.PK_table2.ID",
            "user0.TABLE0.PK_TABLE0.ID",
            "user0.table0.PK_table0.id"
    );

    List<String> actualPks = new ArrayList<>(expectedPks.size());

    // try-with-resources: the original leaked the ResultSet.
    try (ResultSet rs = conn.getMetaData().getPrimaryKeys(null, null, null)) {
        while (rs.next()) {
            actualPks.add(rs.getString("TABLE_SCHEM")
                    + '.' + rs.getString("TABLE_NAME")
                    + '.' + rs.getString("PK_NAME")
                    + '.' + rs.getString("COLUMN_NAME"));
        }
    }

    assertEquals(expectedPks, actualPks, "Metadata contains unexpected primary keys info.");
}
/** Verifies that every metadata lookup with an unknown catalog name yields an empty result. */
@Test
public void testInvalidCatalog() throws Exception {
    DatabaseMetaData meta = conn.getMetaData();

    // try-with-resources on each lookup: the original leaked all five ResultSets.
    try (ResultSet rs = meta.getSchemas("q", null)) {
        assertFalse(rs.next(), "Results must be empty");
    }

    try (ResultSet rs = meta.getTables("q", null, null, null)) {
        assertFalse(rs.next(), "Results must be empty");
    }

    try (ResultSet rs = meta.getColumns("q", null, null, null)) {
        assertFalse(rs.next(), "Results must be empty");
    }

    try (ResultSet rs = meta.getIndexInfo("q", null, null, false, false)) {
        assertFalse(rs.next(), "Results must be empty");
    }

    try (ResultSet rs = meta.getPrimaryKeys("q", null, null)) {
        assertFalse(rs.next(), "Results must be empty");
    }
}
/** Verifies that {@code getTableTypes()} reports exactly one type: TABLE. */
@Test
public void testGetTableTypes() throws Exception {
    DatabaseMetaData meta = conn.getMetaData();

    // try-with-resources: the original leaked the ResultSet.
    try (ResultSet rs = meta.getTableTypes()) {
        assertTrue(rs.next());
        assertEquals("TABLE", rs.getString("TABLE_TYPE"));
        assertFalse(rs.next());
    }
}
/**
 * Verifies {@code PreparedStatement.getParameterMetaData()} for statements with zero,
 * two and four dynamic parameters, including a multi-statement script.
 */
@Test
@Disabled("https://issues.apache.org/jira/browse/IGNITE-16203")
public void testParametersMetadata() throws Exception {
    // Perform checks few times due to query/plan caching.
    for (int i = 0; i < 3; i++) {
        // No parameters statement.
        try (Connection conn = DriverManager.getConnection(URL)) {
            conn.setSchema("\"pers\"");

            PreparedStatement noParams = conn.prepareStatement("select * from Person;");
            ParameterMetaData params = noParams.getParameterMetaData();

            assertEquals(0, params.getParameterCount(), "Parameters should be empty.");
        }

        // Selects.
        try (Connection conn = DriverManager.getConnection(URL)) {
            conn.setSchema("\"pers\"");

            PreparedStatement selectStmt = conn.prepareStatement("select orgId from Person p where p.name > ? and p.orgId > ?");

            ParameterMetaData meta = selectStmt.getParameterMetaData();

            assertNotNull(meta);

            // Parameter types are inferred from the compared columns; nullability is
            // reported as unknown and VARCHAR precision as unbounded.
            assertEquals(2, meta.getParameterCount());

            assertEquals(Types.VARCHAR, meta.getParameterType(1));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(1));
            assertEquals(Integer.MAX_VALUE, meta.getPrecision(1));

            assertEquals(Types.INTEGER, meta.getParameterType(2));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(2));
        }

        // Updates.
        try (Connection conn = DriverManager.getConnection(URL)) {
            conn.setSchema("\"pers\"");

            PreparedStatement updateStmt = conn.prepareStatement("update Person p set orgId = 42 where p.name > ? and p.orgId > ?");

            ParameterMetaData meta = updateStmt.getParameterMetaData();

            assertNotNull(meta);

            assertEquals(2, meta.getParameterCount());

            assertEquals(Types.VARCHAR, meta.getParameterType(1));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(1));
            assertEquals(Integer.MAX_VALUE, meta.getPrecision(1));

            assertEquals(Types.INTEGER, meta.getParameterType(2));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(2));
        }

        // Multistatement
        try (Connection conn = DriverManager.getConnection(URL)) {
            conn.setSchema("\"pers\"");

            PreparedStatement updateStmt = conn.prepareStatement(
                    "update Person p set orgId = 42 where p.name > ? and p.orgId > ?;"
                            + "select orgId from Person p where p.name > ? and p.orgId > ?");

            ParameterMetaData meta = updateStmt.getParameterMetaData();

            assertNotNull(meta);

            // Parameters of both statements are concatenated into one list.
            assertEquals(4, meta.getParameterCount());

            assertEquals(Types.VARCHAR, meta.getParameterType(1));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(1));
            assertEquals(Integer.MAX_VALUE, meta.getPrecision(1));

            assertEquals(Types.INTEGER, meta.getParameterType(2));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(2));

            assertEquals(Types.VARCHAR, meta.getParameterType(3));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(3));
            assertEquals(Integer.MAX_VALUE, meta.getPrecision(3));

            assertEquals(Types.INTEGER, meta.getParameterType(4));
            assertEquals(ParameterMetaData.parameterNullableUnknown, meta.isNullable(4));
        }
    }
}
/**
 * Check that parameters metadata throws correct exception on non-parsable statement.
 */
@Test
@Disabled("https://issues.apache.org/jira/browse/IGNITE-16203")
public void testParametersMetadataNegative() throws Exception {
    try (Connection conn = DriverManager.getConnection(URL)) {
        conn.setSchema("\"pers\"");

        // Preparation succeeds lazily; the failure surfaces on metadata access.
        PreparedStatement badStmt = conn.prepareStatement("select * from NotExistingTable;");

        JdbcTestUtils.assertThrowsSqlException("Table NOTEXISTINGTABLE not found", badStmt::getParameterMetaData);
    }
}
/**
 * Negative scenarios for the catalog name: performs metadata lookups that use
 * incorrect catalog names (empty and unknown) and expects empty results.
 */
@Test
public void testCatalogWithNotExistingName() throws SQLException {
    checkNoEntitiesFoundForCatalog("");

    checkNoEntitiesFoundForCatalog("NOT_EXISTING_CATALOG");
}
// IgniteCustomType: Add JDBC metadata test for your type.
/**
 * Checks that when a metadata lookup is performed with the given catalog name — which is
 * neither {@code null} nor the correct catalog name — an empty result set is returned.
 *
 * @param invalidCat Catalog name that is neither {@code null} nor a valid catalog name.
 * @throws SQLException On metadata access error.
 */
private void checkNoEntitiesFoundForCatalog(String invalidCat) throws SQLException {
    DatabaseMetaData meta = conn.getMetaData();

    // The other arguments are chosen to match as many entities as possible, so only
    // the invalid catalog can be responsible for an empty result.
    assertIsEmpty(meta.getTables(invalidCat, null, "%", new String[]{"TABLE"}));
    assertIsEmpty(meta.getColumns(invalidCat, null, "%", "%"));
    assertIsEmpty(meta.getColumnPrivileges(invalidCat, "pers", "PERSON", "%"));
    assertIsEmpty(meta.getTablePrivileges(invalidCat, null, "%"));
    assertIsEmpty(meta.getPrimaryKeys(invalidCat, "pers", "PERSON"));
    assertIsEmpty(meta.getImportedKeys(invalidCat, "pers", "PERSON"));
    assertIsEmpty(meta.getExportedKeys(invalidCat, "pers", "PERSON"));
    // meta.getCrossReference(...) doesn't make sense because we don't have FK constraint.
    assertIsEmpty(meta.getIndexInfo(invalidCat, null, "%", false, true));
    assertIsEmpty(meta.getSuperTables(invalidCat, "%", "%"));
    assertIsEmpty(meta.getSchemas(invalidCat, null));
    assertIsEmpty(meta.getPseudoColumns(invalidCat, null, "%", ""));
}
/**
 * Assert that specified ResultSet contains no rows; the result set is always closed.
 *
 * @param rs Result set to check.
 * @throws SQLException On error.
 */
private static void assertIsEmpty(ResultSet rs) throws SQLException {
    try (rs) {
        assertFalse(rs.next(), "Result should be empty because invalid catalog is specified.");
    }
}
}
|
apache/ignite | 35,891 | modules/binary/impl/src/main/java/org/apache/ignite/internal/marshaller/optimized/OptimizedClassDescriptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.marshaller.optimized;
import java.io.Externalizable;
import java.io.IOException;
import java.io.NotSerializableException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamField;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import org.apache.ignite.internal.util.CommonUtils;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.marshaller.MarshallerContext;
import org.apache.ignite.marshaller.MarshallerExclusions;
import org.apache.ignite.marshaller.Marshallers;
import static java.lang.reflect.Modifier.isFinal;
import static java.lang.reflect.Modifier.isPrivate;
import static java.lang.reflect.Modifier.isStatic;
import static java.lang.reflect.Modifier.isTransient;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.ARRAY_LIST;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.BOOLEAN;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.BOOLEAN_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.BYTE;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.BYTE_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.CHAR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.CHAR_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.CLS;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.DATE;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.DOUBLE;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.DOUBLE_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.ENUM;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.EXTERNALIZABLE;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.FLOAT;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.FLOAT_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.HASH_MAP;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.HASH_SET;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.HASH_SET_MAP_OFF;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.INT;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.INT_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.LINKED_HASH_MAP;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.LINKED_HASH_SET;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.LINKED_LIST;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.LONG;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.LONG_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.OBJ_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.PROPS;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.PROXY;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.SERIALIZABLE;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.SHORT;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.SHORT_ARR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.STR;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.UUID;
import static org.apache.ignite.internal.marshaller.optimized.OptimizedMarshallerUtils.computeSerialVersionUid;
import static org.apache.ignite.internal.util.CommonUtils.isLambda;
/**
* Class descriptor.
*/
class OptimizedClassDescriptor {
/** Class. */
private final Class<?> cls;
/** Context. */
private final MarshallerContext ctx;
/** */
private ConcurrentMap<Class, OptimizedClassDescriptor> clsMap;
/** ID mapper. */
private final OptimizedMarshallerIdMapper mapper;
/** Class name. */
private final String name;
/** Type ID. */
private final int typeId;
/** Short ID. */
private final short checksum;
/** Class type. */
private int type;
/** Primitive flag. */
private boolean isPrimitive;
/** Enum flag. */
private boolean isEnum;
/** Serializable flag. */
private boolean isSerial;
/** Excluded flag. */
private boolean excluded;
/** {@code True} if descriptor is for {@link Class}. */
private boolean isCls;
/** Enumeration values. */
private Object[] enumVals;
/** Constructor. */
private Constructor<?> constructor;
/** Fields. */
private Fields fields;
/** {@code writeObject} methods. */
private List<Method> writeObjMtds;
/** {@code writeReplace} method. */
private Method writeReplaceMtd;
/** {@code readObject} methods. */
private List<Method> readObjMtds;
/** {@code readResolve} method. */
private Method readResolveMtd;
/** Defaults field offset. */
private long dfltsFieldOff;
/** Load factor field offset. */
private long loadFactorFieldOff;
/** Access order field offset. */
private long accessOrderFieldOff;
/** Proxy interfaces. */
private Class<?>[] proxyIntfs;
/**
 * Creates descriptor for class, computing the excluded flag from
 * {@link MarshallerExclusions}.
 *
 * @param cls Class.
 * @param typeId Type ID.
 * @param clsMap Class descriptors by class map.
 * @param ctx Context.
 * @param mapper ID mapper.
 * @throws IOException In case of error.
 */
OptimizedClassDescriptor(Class<?> cls,
    int typeId,
    ConcurrentMap<Class, OptimizedClassDescriptor> clsMap,
    MarshallerContext ctx,
    OptimizedMarshallerIdMapper mapper)
    throws IOException {
    this(
        cls,
        typeId,
        clsMap,
        ctx,
        mapper,
        MarshallerExclusions.isExcluded(cls)
    );
}
/**
 * Creates descriptor for class. Classifies the class into one of the optimized
 * type codes (primitives, arrays, well-known JDK collections, enums, proxies,
 * {@link Externalizable} or generic {@link Serializable}) and pre-computes all
 * reflection data needed to marshal instances of it.
 *
 * @param cls Class.
 * @param typeId Type ID.
 * @param clsMap Class descriptors by class map.
 * @param ctx Context.
 * @param mapper ID mapper.
 * @param excluded Whether the class is excluded from marshalling.
 * @throws IOException In case of error.
 */
@SuppressWarnings("ForLoopReplaceableByForEach")
OptimizedClassDescriptor(Class<?> cls,
    int typeId,
    ConcurrentMap<Class, OptimizedClassDescriptor> clsMap,
    MarshallerContext ctx,
    OptimizedMarshallerIdMapper mapper,
    boolean excluded)
    throws IOException {
    this.cls = cls;
    this.typeId = typeId;
    this.clsMap = clsMap;
    this.ctx = ctx;
    this.mapper = mapper;
    name = cls.getName();
    this.excluded = excluded;
    // Excluded classes get no type classification; only the checksum below is computed.
    if (!excluded) {
        Class<?> parent;
        // Primitives and their wrappers share a single type code each.
        if (cls == byte.class || cls == Byte.class) {
            type = BYTE;
            isPrimitive = true;
        }
        else if (cls == short.class || cls == Short.class) {
            type = SHORT;
            isPrimitive = true;
        }
        else if (cls == int.class || cls == Integer.class) {
            type = INT;
            isPrimitive = true;
        }
        else if (cls == long.class || cls == Long.class) {
            type = LONG;
            isPrimitive = true;
        }
        else if (cls == float.class || cls == Float.class) {
            type = FLOAT;
            isPrimitive = true;
        }
        else if (cls == double.class || cls == Double.class) {
            type = DOUBLE;
            isPrimitive = true;
        }
        else if (cls == char.class || cls == Character.class) {
            type = CHAR;
            isPrimitive = true;
        }
        else if (cls == boolean.class || cls == Boolean.class) {
            type = BOOLEAN;
            isPrimitive = true;
        }
        // Primitive arrays have dedicated type codes; any other array is OBJ_ARR.
        else if (cls == byte[].class)
            type = BYTE_ARR;
        else if (cls == short[].class)
            type = SHORT_ARR;
        else if (cls == int[].class)
            type = INT_ARR;
        else if (cls == long[].class)
            type = LONG_ARR;
        else if (cls == float[].class)
            type = FLOAT_ARR;
        else if (cls == double[].class)
            type = DOUBLE_ARR;
        else if (cls == char[].class)
            type = CHAR_ARR;
        else if (cls == boolean[].class)
            type = BOOLEAN_ARR;
        else if (cls.isArray())
            type = OBJ_ARR;
        else if (cls == String.class)
            type = STR;
        else if (cls.isEnum()) {
            type = ENUM;
            isEnum = true;
            enumVals = cls.getEnumConstants();
        }
        // Support for enum constants, based on anonymous children classes.
        else if ((parent = cls.getSuperclass()) != null && parent.isEnum()) {
            type = ENUM;
            isEnum = true;
            // Constants are taken from the declaring enum, not the anonymous subclass.
            enumVals = parent.getEnumConstants();
        }
        else if (cls == UUID.class)
            type = UUID;
        else if (cls == Properties.class) {
            type = PROPS;
            // Cache the Unsafe offset of Properties.defaults so the chained defaults
            // table can be written without going through Java serialization.
            try {
                dfltsFieldOff = GridUnsafe.objectFieldOffset(Properties.class.getDeclaredField("defaults"));
            }
            catch (NoSuchFieldException e) {
                throw new IOException(e);
            }
        }
        else if (cls == ArrayList.class)
            type = ARRAY_LIST;
        else if (cls == HashMap.class) {
            type = HASH_MAP;
            try {
                loadFactorFieldOff = GridUnsafe.objectFieldOffset(HashMap.class.getDeclaredField("loadFactor"));
            }
            catch (NoSuchFieldException e) {
                throw new IOException(e);
            }
        }
        else if (cls == HashSet.class) {
            type = HASH_SET;
            // HashSet is backed by an internal HashMap, so its load factor lives on HashMap.
            try {
                loadFactorFieldOff = GridUnsafe.objectFieldOffset(HashMap.class.getDeclaredField("loadFactor"));
            }
            catch (NoSuchFieldException e) {
                throw new IOException(e);
            }
        }
        else if (cls == LinkedList.class)
            type = LINKED_LIST;
        else if (cls == LinkedHashMap.class) {
            type = LINKED_HASH_MAP;
            // LinkedHashMap additionally carries the access-order flag.
            try {
                loadFactorFieldOff =
                    GridUnsafe.objectFieldOffset(HashMap.class.getDeclaredField("loadFactor"));
                accessOrderFieldOff =
                    GridUnsafe.objectFieldOffset(LinkedHashMap.class.getDeclaredField("accessOrder"));
            }
            catch (NoSuchFieldException e) {
                throw new IOException(e);
            }
        }
        else if (cls == LinkedHashSet.class) {
            type = LINKED_HASH_SET;
            try {
                loadFactorFieldOff = GridUnsafe.objectFieldOffset(HashMap.class.getDeclaredField("loadFactor"));
            }
            catch (NoSuchFieldException e) {
                throw new IOException(e);
            }
        }
        else if (cls == Date.class)
            type = DATE;
        else if (cls == Class.class) {
            type = CLS;
            isCls = true;
        }
        else if (Proxy.class.isAssignableFrom(cls)) {
            type = PROXY;
            proxyIntfs = cls.getInterfaces();
        }
        else {
            // Generic object: locate writeReplace/readResolve up the hierarchy.
            // Per Java serialization rules, a private method is honored only when
            // declared on the class itself, and the return type must be Object.
            Class<?> c = cls;
            while ((writeReplaceMtd == null || readResolveMtd == null) && c != null && !c.equals(Object.class)) {
                if (writeReplaceMtd == null) {
                    try {
                        writeReplaceMtd = c.getDeclaredMethod("writeReplace");
                        if (!isStatic(writeReplaceMtd.getModifiers()) &&
                            !(isPrivate(writeReplaceMtd.getModifiers()) && c != cls) &&
                            writeReplaceMtd.getReturnType().equals(Object.class))
                            writeReplaceMtd.setAccessible(true);
                        else
                            // Set method back to null if it has incorrect signature.
                            writeReplaceMtd = null;
                    }
                    catch (NoSuchMethodException ignored) {
                        // No-op.
                    }
                }
                if (readResolveMtd == null) {
                    try {
                        readResolveMtd = c.getDeclaredMethod("readResolve");
                        if (!isStatic(readResolveMtd.getModifiers()) &&
                            !(isPrivate(readResolveMtd.getModifiers()) && c != cls) &&
                            readResolveMtd.getReturnType().equals(Object.class))
                            readResolveMtd.setAccessible(true);
                        else
                            // Set method back to null if it has incorrect signature.
                            readResolveMtd = null;
                    }
                    catch (NoSuchMethodException ignored) {
                        // No-op.
                    }
                }
                c = c.getSuperclass();
            }
            if (Externalizable.class.isAssignableFrom(cls)) {
                type = EXTERNALIZABLE;
                try {
                    // A non-static inner class needs its enclosing instance's class as
                    // the single constructor argument; otherwise the no-arg constructor.
                    constructor = !Modifier.isStatic(cls.getModifiers()) && cls.getDeclaringClass() != null ?
                        cls.getDeclaredConstructor(cls.getDeclaringClass()) :
                        cls.getDeclaredConstructor();
                    constructor.setAccessible(true);
                }
                catch (NoSuchMethodException e) {
                    throw new IOException("Externalizable class doesn't have default constructor: " + cls, e);
                }
            }
            else {
                type = SERIALIZABLE;
                isSerial = Serializable.class.isAssignableFrom(cls);
                writeObjMtds = new ArrayList<>();
                readObjMtds = new ArrayList<>();
                List<ClassFields> fields = new ArrayList<>();
                if (isLambda(cls)) {
                    // Lambdas carry no scannable fields; they must be Serializable.
                    if (!isSerial)
                        throw new NotSerializableException("Lambda is not serializable: " + cls);
                }
                else {
                    // Walk the hierarchy (subclass first) collecting per-class
                    // writeObject/readObject hooks and serializable fields.
                    for (c = cls; c != null && !c.equals(Object.class); c = c.getSuperclass()) {
                        Method mtd;
                        try {
                            mtd = c.getDeclaredMethod("writeObject", ObjectOutputStream.class);
                            int mod = mtd.getModifiers();
                            // writeObject must be a private non-static void method.
                            if (!isStatic(mod) && isPrivate(mod) && mtd.getReturnType() == Void.TYPE)
                                mtd.setAccessible(true);
                            else
                                // Set method back to null if it has incorrect signature.
                                mtd = null;
                        }
                        catch (NoSuchMethodException ignored) {
                            mtd = null;
                        }
                        // One entry per hierarchy level, null when the level has no hook.
                        writeObjMtds.add(mtd);
                        try {
                            mtd = c.getDeclaredMethod("readObject", ObjectInputStream.class);
                            int mod = mtd.getModifiers();
                            if (!isStatic(mod) && isPrivate(mod) && mtd.getReturnType() == Void.TYPE)
                                mtd.setAccessible(true);
                            else
                                // Set method back to null if it has incorrect signature.
                                mtd = null;
                        }
                        catch (NoSuchMethodException ignored) {
                            mtd = null;
                        }
                        readObjMtds.add(mtd);
                        Field[] clsFields0 = c.getDeclaredFields();
                        Map<String, Field> fieldNames = new HashMap<>();
                        for (Field f : clsFields0)
                            fieldNames.put(f.getName(), f);
                        List<FieldInfo> clsFields = new ArrayList<>(clsFields0.length);
                        boolean hasSerialPersistentFields = false;
                        try {
                            // An explicit serialPersistentFields declaration overrides the
                            // default "all non-static non-transient fields" rule.
                            Field serFieldsDesc = c.getDeclaredField("serialPersistentFields");
                            int mod = serFieldsDesc.getModifiers();
                            if (serFieldsDesc.getType() == ObjectStreamField[].class &&
                                isPrivate(mod) && isStatic(mod) && isFinal(mod)) {
                                hasSerialPersistentFields = true;
                                serFieldsDesc.setAccessible(true);
                                ObjectStreamField[] serFields = (ObjectStreamField[])serFieldsDesc.get(null);
                                for (int i = 0; i < serFields.length; i++) {
                                    ObjectStreamField serField = serFields[i];
                                    FieldInfo fieldInfo;
                                    if (!fieldNames.containsKey(serField.getName())) {
                                        // Declared serial field with no backing Java field:
                                        // recorded with a null field and offset -1.
                                        fieldInfo = new FieldInfo(null,
                                            serField.getName(),
                                            -1,
                                            fieldType(serField.getType()));
                                    }
                                    else {
                                        Field f = fieldNames.get(serField.getName());
                                        fieldInfo = new FieldInfo(f,
                                            serField.getName(),
                                            GridUnsafe.objectFieldOffset(f),
                                            fieldType(serField.getType()));
                                    }
                                    clsFields.add(fieldInfo);
                                }
                            }
                        }
                        catch (NoSuchFieldException ignored) {
                            // No-op.
                        }
                        catch (IllegalAccessException e) {
                            throw new IOException("Failed to get value of 'serialPersistentFields' field in class: " +
                                cls.getName(), e);
                        }
                        if (!hasSerialPersistentFields) {
                            for (int i = 0; i < clsFields0.length; i++) {
                                Field f = clsFields0[i];
                                int mod = f.getModifiers();
                                if (!isStatic(mod) && !isTransient(mod)) {
                                    FieldInfo fieldInfo = new FieldInfo(f, f.getName(),
                                        GridUnsafe.objectFieldOffset(f), fieldType(f.getType()));
                                    clsFields.add(fieldInfo);
                                }
                            }
                        }
                        // Sort by name so field order is stable across JVMs.
                        Collections.sort(clsFields, new Comparator<FieldInfo>() {
                            @Override public int compare(FieldInfo t1, FieldInfo t2) {
                                return t1.name().compareTo(t2.name());
                            }
                        });
                        fields.add(new ClassFields(clsFields));
                    }
                }
                // Reverse so index 0 is the topmost superclass (write order).
                Collections.reverse(writeObjMtds);
                Collections.reverse(readObjMtds);
                Collections.reverse(fields);
                this.fields = new Fields(fields);
            }
        }
    }
    checksum = computeSerialVersionUid(cls, fields != null ? fields.ownFields() : null);
}
/**
 * @return Whether this class is excluded from marshalling.
 */
boolean excluded() {
    return this.excluded;
}
/**
 * @return Class this descriptor was built for.
 */
Class<?> describedClass() {
    return this.cls;
}
/**
 * @return Whether the class is a primitive or primitive wrapper.
 */
boolean isPrimitive() {
    return this.isPrimitive;
}
/**
 * @return Whether the class is an enum (or an anonymous enum constant class).
 */
boolean isEnum() {
    return this.isEnum;
}
/**
 * @return {@code True} if descriptor is for {@link Class}.
 */
boolean isClass() {
    return this.isCls;
}
/**
 * @return {@code True} if descriptor is for {@link Proxy}.
 */
boolean isProxy() {
    return this.type == PROXY;
}
/**
 * Applies the class's {@code writeReplace} hook, if one was discovered.
 *
 * @param obj Object.
 * @return Replacement object, or {@code obj} itself when the class has no
 *      {@code writeReplace} method.
 * @throws IOException If the reflective invocation fails.
 */
Object replace(Object obj) throws IOException {
    if (writeReplaceMtd == null)
        return obj;
    try {
        return writeReplaceMtd.invoke(obj);
    }
    catch (IllegalAccessException | InvocationTargetException e) {
        throw new IOException(e);
    }
}
/**
 * Writes object to stream. Emits the one-byte type code first, then the
 * payload in the format matching that code.
 *
 * @param out Output stream.
 * @param obj Object.
 * @throws IOException In case of error.
 */
void write(OptimizedObjectOutputStream out, Object obj) throws IOException {
    out.write(type);
    switch (type) {
        case BYTE:
            out.writeByte((Byte)obj);
            break;
        case SHORT:
            out.writeShort((Short)obj);
            break;
        case INT:
            out.writeInt((Integer)obj);
            break;
        case LONG:
            out.writeLong((Long)obj);
            break;
        case FLOAT:
            out.writeFloat((Float)obj);
            break;
        case DOUBLE:
            out.writeDouble((Double)obj);
            break;
        case CHAR:
            out.writeChar((Character)obj);
            break;
        case BOOLEAN:
            out.writeBoolean((Boolean)obj);
            break;
        case BYTE_ARR:
            out.writeByteArray((byte[])obj);
            break;
        case SHORT_ARR:
            out.writeShortArray((short[])obj);
            break;
        case INT_ARR:
            out.writeIntArray((int[])obj);
            break;
        case LONG_ARR:
            out.writeLongArray((long[])obj);
            break;
        case FLOAT_ARR:
            out.writeFloatArray((float[])obj);
            break;
        case DOUBLE_ARR:
            out.writeDoubleArray((double[])obj);
            break;
        case CHAR_ARR:
            out.writeCharArray((char[])obj);
            break;
        case BOOLEAN_ARR:
            out.writeBooleanArray((boolean[])obj);
            break;
        case OBJ_ARR:
            // Object arrays are prefixed with the component type's descriptor data.
            OptimizedClassDescriptor compDesc = OptimizedMarshallerUtils.classDescriptor(clsMap,
                obj.getClass().getComponentType(),
                Marshallers.USE_CACHE.get(),
                ctx,
                mapper);
            compDesc.writeTypeData(out);
            out.writeArray((Object[])obj);
            break;
        case STR:
            out.writeString((String)obj);
            break;
        case UUID:
            out.writeUuid((UUID)obj);
            break;
        case PROPS:
            // Field offsets cached by the constructor let collection internals be
            // read directly via Unsafe instead of Java serialization.
            out.writeProperties((Properties)obj, dfltsFieldOff);
            break;
        case ARRAY_LIST:
            out.writeArrayList((ArrayList<?>)obj);
            break;
        case HASH_MAP:
            out.writeHashMap((HashMap<?, ?>)obj, loadFactorFieldOff, false);
            break;
        case HASH_SET:
            out.writeHashSet((HashSet<?>)obj, HASH_SET_MAP_OFF, loadFactorFieldOff);
            break;
        case LINKED_LIST:
            out.writeLinkedList((LinkedList<?>)obj);
            break;
        case LINKED_HASH_MAP:
            out.writeLinkedHashMap((LinkedHashMap<?, ?>)obj, loadFactorFieldOff, accessOrderFieldOff, false);
            break;
        case LINKED_HASH_SET:
            out.writeLinkedHashSet((LinkedHashSet<?>)obj, HASH_SET_MAP_OFF, loadFactorFieldOff);
            break;
        case DATE:
            out.writeDate((Date)obj);
            break;
        case CLS:
            // Class objects are written as their descriptor's type data.
            OptimizedClassDescriptor clsDesc = OptimizedMarshallerUtils.classDescriptor(
                clsMap, (Class<?>)obj, Marshallers.USE_CACHE.get(), ctx, mapper);
            clsDesc.writeTypeData(out);
            break;
        case PROXY:
            // Proxies are written as interface count, each interface's type data,
            // then the serialized invocation handler.
            out.writeInt(proxyIntfs.length);
            for (Class<?> intf : proxyIntfs) {
                OptimizedClassDescriptor intfDesc = OptimizedMarshallerUtils.classDescriptor(
                    clsMap, intf, Marshallers.USE_CACHE.get(), ctx, mapper);
                intfDesc.writeTypeData(out);
            }
            InvocationHandler ih = Proxy.getInvocationHandler(obj);
            assert ih != null;
            out.writeObject(ih);
            break;
        case ENUM:
            // Enums are written by ordinal; read() maps it back via enumVals.
            writeTypeData(out);
            out.writeInt(((Enum)obj).ordinal());
            break;
        case EXTERNALIZABLE:
            // The checksum lets the reader detect a different class version.
            writeTypeData(out);
            out.writeShort(checksum);
            out.writeExternalizable(obj);
            break;
        case SERIALIZABLE:
            if (out.requireSerializable() && !isSerial)
                throw new NotSerializableException("Must implement java.io.Serializable or " +
                    "set OptimizedMarshaller.setRequireSerializable() to false " +
                    "(note that performance may degrade if object is not Serializable): " + name);
            writeTypeData(out);
            out.writeShort(checksum);
            out.writeSerializable(obj, writeObjMtds, fields);
            break;
        default:
            throw new IllegalStateException("Invalid class type: " + type);
    }
}
/**
 * Writes the type identification data: the type ID, followed by the class
 * name when the ID is the unregistered marker {@code 0}.
 *
 * @param out Output stream.
 * @throws IOException In case of error.
 */
void writeTypeData(OptimizedObjectOutputStream out) throws IOException {
    out.writeInt(typeId);
    boolean unregistered = typeId == 0;
    if (unregistered)
        out.writeUTF(name);
}
/**
 * Reads object from stream. Only enum, externalizable and serializable
 * descriptors can be read back here; other type codes are handled elsewhere.
 *
 * @param in Input stream.
 * @return Object.
 * @throws ClassNotFoundException If class not found.
 * @throws IOException In case of error.
 */
Object read(OptimizedObjectInputStream in) throws ClassNotFoundException, IOException {
    if (type == ENUM)
        return enumVals[in.readInt()];
    if (type == EXTERNALIZABLE) {
        verifyChecksum(in.readShort());
        return in.readExternalizable(constructor, readResolveMtd);
    }
    if (type == SERIALIZABLE) {
        verifyChecksum(in.readShort());
        return in.readSerializable(cls, readObjMtds, readResolveMtd, fields);
    }
    assert false : "Unexpected type: " + type;
    return null;
}
/**
 * Verifies that the checksum read from the stream matches this descriptor's.
 *
 * @param checksum Checksum.
 * @throws ClassNotFoundException If checksum is wrong.
 * @throws IOException In case of error.
 */
private void verifyChecksum(short checksum) throws ClassNotFoundException, IOException {
    if (checksum == this.checksum)
        return;
    throw new ClassNotFoundException("Optimized stream class checksum mismatch " +
        "(is same version of marshalled class present on all nodes?) " +
        "[expected=" + this.checksum + ", actual=" + checksum + ", cls=" + cls + ']');
}
/**
 * Maps a field's declared class to its optimized field type; anything that is
 * not a primitive is {@code OTHER}.
 *
 * @param cls Class.
 * @return Type.
 */
private OptimizedFieldType fieldType(Class<?> cls) {
    if (cls == byte.class)
        return OptimizedFieldType.BYTE;
    if (cls == short.class)
        return OptimizedFieldType.SHORT;
    if (cls == int.class)
        return OptimizedFieldType.INT;
    if (cls == long.class)
        return OptimizedFieldType.LONG;
    if (cls == float.class)
        return OptimizedFieldType.FLOAT;
    if (cls == double.class)
        return OptimizedFieldType.DOUBLE;
    if (cls == char.class)
        return OptimizedFieldType.CHAR;
    if (cls == boolean.class)
        return OptimizedFieldType.BOOLEAN;
    return OptimizedFieldType.OTHER;
}
/**
 * Information about one serializable field.
 */
@SuppressWarnings("PackageVisibleInnerClass")
static class FieldInfo {
    /** Reflected field; {@code null} for a serialPersistentFields entry with no backing field. */
    private final Field reflectField;
    /** Unsafe offset of the field within its object. */
    private final long off;
    /** Optimized type of the field. */
    private final OptimizedFieldType type;
    /** Field name. */
    private final String name;
    /**
     * @param field Field.
     * @param name Field name.
     * @param offset Field offset.
     * @param type Grid optimized field type.
     */
    FieldInfo(Field field, String name, long offset, OptimizedFieldType type) {
        reflectField = field;
        off = offset;
        this.type = type;
        this.name = name;
    }
    /**
     * @return Returns field.
     */
    Field field() {
        return reflectField;
    }
    /**
     * @return Offset.
     */
    long offset() {
        return off;
    }
    /**
     * @return Type.
     */
    OptimizedFieldType type() {
        return type;
    }
    /**
     * @return Name.
     */
    String name() {
        return name;
    }
}
/**
 * Information about one class: its serializable fields plus a name-to-index
 * lookup table.
 */
static class ClassFields {
    /** Fields. */
    private final List<FieldInfo> fields;
    /** Index of each field within {@link #fields}, keyed by field name. */
    private final Map<String, Integer> idxByName;
    /**
     * @param fields Field infos.
     */
    ClassFields(List<FieldInfo> fields) {
        this.fields = fields;
        idxByName = CommonUtils.newHashMap(fields.size());
        int idx = 0;
        for (FieldInfo info : fields)
            idxByName.put(info.name(), idx++);
    }
    /**
     * @return Class fields.
     */
    List<FieldInfo> fields() {
        return fields;
    }
    /**
     * @return Fields count.
     */
    int size() {
        return fields.size();
    }
    /**
     * @param i Field's index.
     * @return FieldInfo.
     */
    FieldInfo get(int i) {
        return fields.get(i);
    }
    /**
     * @param name Field's name.
     * @return Field's index.
     */
    int getIndex(String name) {
        assert idxByName.containsKey(name);
        return idxByName.get(name);
    }
}
/**
 * Encapsulates field data for a whole class hierarchy.
 */
@SuppressWarnings("PackageVisibleInnerClass")
static class Fields {
    /** Per-hierarchy-level field lists, topmost superclass first. */
    private final List<ClassFields> fields;
    /** Own fields (excluding inherited). */
    private final List<Field> ownFields;
    /**
     * Creates new instance.
     *
     * @param fields Fields.
     */
    Fields(List<ClassFields> fields) {
        this.fields = fields;
        if (fields.isEmpty())
            ownFields = null;
        else {
            // The last level corresponds to the described class itself; keep only
            // entries that are backed by a real reflected field.
            List<Field> own = new ArrayList<>(fields.size());
            for (FieldInfo info : fields.get(fields.size() - 1).fields()) {
                Field f = info.field();
                if (f != null)
                    own.add(f);
            }
            ownFields = own;
        }
    }
    /**
     * Returns class's own fields (excluding inherited).
     *
     * @return List of fields or {@code null} if fields list is empty.
     */
    List<Field> ownFields() {
        return ownFields;
    }
    /**
     * Returns field types and their offsets.
     *
     * @param i hierarchy level where 0 corresponds to top level.
     * @return list of pairs where first value is field type and second value is its offset.
     */
    ClassFields fields(int i) {
        return fields.get(i);
    }
}
}
|
googleapis/google-cloud-java | 35,795 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/CreateUserRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1/user.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1;
/**
*
*
* <pre>
* Request message for the `CreateUser` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.CreateUserRequest}
*/
public final class CreateUserRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.CreateUserRequest)
CreateUserRequestOrBuilder {
// Generated code: construction and descriptor plumbing. Left byte-identical.
private static final long serialVersionUID = 0L;
// Use CreateUserRequest.newBuilder() to construct.
private CreateUserRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}
// No-arg constructor initializes string fields to their default empty value.
private CreateUserRequest() {
    parent_ = "";
    userId_ = "";
}
// Creates a fresh default instance; the parameter is a marker and unused.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateUserRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1.UserProto
        .internal_static_google_shopping_merchant_accounts_v1_CreateUserRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.accounts.v1.UserProto
        .internal_static_google_shopping_merchant_accounts_v1_CreateUserRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1.CreateUserRequest.class,
            com.google.shopping.merchant.accounts.v1.CreateUserRequest.Builder.class);
}
// Presence bits for optional submessage fields (bit 0 tracks `user`).
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily on first String access.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The resource name of the account for which a user will be
 * created. Format: `accounts/{account}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    } else {
        // Decode the UTF-8 ByteString once and cache the String for later calls.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
    }
}
/**
 *
 *
 * <pre>
 * Required. The resource name of the account for which a user will be
 * created. Format: `accounts/{account}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
        // Encode the cached String once and keep the ByteString for later calls.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
public static final int USER_ID_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; decoded lazily on first String access.
@SuppressWarnings("serial")
private volatile java.lang.Object userId_ = "";
/**
 *
 *
 * <pre>
 * Required. The email address of the user (for example,
 * `john.doe&#64;gmail.com`).
 * </pre>
 *
 * <code>string user_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The userId.
 */
@java.lang.Override
public java.lang.String getUserId() {
    java.lang.Object ref = userId_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    } else {
        // Decode the UTF-8 ByteString once and cache the String for later calls.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        userId_ = s;
        return s;
    }
}
/**
 *
 *
 * <pre>
 * Required. The email address of the user (for example,
 * `john.doe&#64;gmail.com`).
 * </pre>
 *
 * <code>string user_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for userId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getUserIdBytes() {
    java.lang.Object ref = userId_;
    if (ref instanceof java.lang.String) {
        // Encode the cached String once and keep the ByteString for later calls.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        userId_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
public static final int USER_FIELD_NUMBER = 3;
private com.google.shopping.merchant.accounts.v1.User user_;
/**
 *
 *
 * <pre>
 * Optional. The user to create.
 * </pre>
 *
 * <code>
 * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the user field is set.
 */
@java.lang.Override
public boolean hasUser() {
    // Presence of the optional `user` submessage is tracked by bit 0 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Optional. The user to create.
 * </pre>
 *
 * <code>
 * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The user.
 */
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.User getUser() {
    // Never returns null: falls back to the default instance when unset.
    return user_ == null
        ? com.google.shopping.merchant.accounts.v1.User.getDefaultInstance()
        : user_;
}
/**
 *
 *
 * <pre>
 * Optional. The user to create.
 * </pre>
 *
 * <code>
 * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.UserOrBuilder getUserOrBuilder() {
    return user_ == null
        ? com.google.shopping.merchant.accounts.v1.User.getDefaultInstance()
        : user_;
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto fields to check, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
}
// Serializes set fields in field-number order; empty strings and an unset
// `user` submessage are omitted from the wire.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userId_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, userId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(3, getUser());
    }
    getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size, mirroring
// the field set written by writeTo above.
@java.lang.Override
public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userId_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, userId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
        size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUser());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}
// Value equality over parent, userId, the optional user submessage, and
// unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.accounts.v1.CreateUserRequest)) {
        return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1.CreateUserRequest other =
        (com.google.shopping.merchant.accounts.v1.CreateUserRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getUserId().equals(other.getUserId())) return false;
    if (hasUser() != other.hasUser()) return false;
    if (hasUser()) {
        if (!getUser().equals(other.getUser())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}
// Hash over the same fields as equals; memoized after first computation
// (messages are immutable, so this is safe).
@java.lang.Override
public int hashCode() {
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + USER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getUserId().hashCode();
    if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
// Standard generated parse entry points; all delegate to PARSER or the
// GeneratedMessageV3 I/O helpers.
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1.CreateUserRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory plumbing generated for every message type.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Returns a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(
    com.google.shopping.merchant.accounts.v1.CreateUserRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Skip the mergeFrom copy when this is the (all-defaults) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Request message for the `CreateUser` method.
 * </pre>
 *
 * Protobuf type {@code google.shopping.merchant.accounts.v1.CreateUserRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.CreateUserRequest)
    com.google.shopping.merchant.accounts.v1.CreateUserRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1.UserProto
        .internal_static_google_shopping_merchant_accounts_v1_CreateUserRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.accounts.v1.UserProto
        .internal_static_google_shopping_merchant_accounts_v1_CreateUserRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1.CreateUserRequest.class,
            com.google.shopping.merchant.accounts.v1.CreateUserRequest.Builder.class);
  }

  // Construct using com.google.shopping.merchant.accounts.v1.CreateUserRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates sub-field builders when the runtime requires it (e.g. for
  // builder-parent change propagation).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getUserFieldBuilder();
    }
  }

  // Resets every field to its default and clears all "has been set" bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    userId_ = "";
    user_ = null;
    if (userBuilder_ != null) {
      userBuilder_.dispose();
      userBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.shopping.merchant.accounts.v1.UserProto
        .internal_static_google_shopping_merchant_accounts_v1_CreateUserRequest_descriptor;
  }

  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.CreateUserRequest getDefaultInstanceForType() {
    return com.google.shopping.merchant.accounts.v1.CreateUserRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.CreateUserRequest build() {
    com.google.shopping.merchant.accounts.v1.CreateUserRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.CreateUserRequest buildPartial() {
    com.google.shopping.merchant.accounts.v1.CreateUserRequest result =
        new com.google.shopping.merchant.accounts.v1.CreateUserRequest(this);
    // Only copy fields over when at least one has been set.
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies each explicitly-set field from the builder into the message and
  // translates the builder's tracking bits into the message's has-bits
  // (only the message field `user` has a has-bit on the message side).
  private void buildPartial0(com.google.shopping.merchant.accounts.v1.CreateUserRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.userId_ = userId_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.user_ = userBuilder_ == null ? user_ : userBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.shopping.merchant.accounts.v1.CreateUserRequest) {
      return mergeFrom((com.google.shopping.merchant.accounts.v1.CreateUserRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-by-field merge: non-empty strings replace, the user message is
  // merged recursively, and unknown fields are concatenated.
  public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.CreateUserRequest other) {
    if (other == com.google.shopping.merchant.accounts.v1.CreateUserRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (!other.getUserId().isEmpty()) {
      userId_ = other.userId_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    if (other.hasUser()) {
      mergeUser(other.getUser());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop: dispatches on the field tag, collecting
  // unrecognized fields into the unknown-field set.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              userId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          case 26:
            {
              input.readMessage(getUserFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Tracks which fields were explicitly set: 0x1 = parent, 0x2 = userId,
  // 0x4 = user.
  private int bitField0_;

  private java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The resource name of the account for which a user will be
   * created. Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    // Lazily decode a ByteString received off the wire and cache the String.
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The resource name of the account for which a user will be
   * created. Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The resource name of the account for which a user will be
   * created. Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The resource name of the account for which a user will be
   * created. Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The resource name of the account for which a user will be
   * created. Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  private java.lang.Object userId_ = "";

  /**
   *
   *
   * <pre>
   * Required. The email address of the user (for example,
   * `john.doe&#64;gmail.com`).
   * </pre>
   *
   * <code>string user_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The userId.
   */
  public java.lang.String getUserId() {
    java.lang.Object ref = userId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      userId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The email address of the user (for example,
   * `john.doe&#64;gmail.com`).
   * </pre>
   *
   * <code>string user_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for userId.
   */
  public com.google.protobuf.ByteString getUserIdBytes() {
    java.lang.Object ref = userId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      userId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The email address of the user (for example,
   * `john.doe&#64;gmail.com`).
   * </pre>
   *
   * <code>string user_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The userId to set.
   * @return This builder for chaining.
   */
  public Builder setUserId(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    userId_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The email address of the user (for example,
   * `john.doe&#64;gmail.com`).
   * </pre>
   *
   * <code>string user_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearUserId() {
    userId_ = getDefaultInstance().getUserId();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The email address of the user (for example,
   * `john.doe&#64;gmail.com`).
   * </pre>
   *
   * <code>string user_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for userId to set.
   * @return This builder for chaining.
   */
  public Builder setUserIdBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    userId_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  private com.google.shopping.merchant.accounts.v1.User user_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.shopping.merchant.accounts.v1.User,
          com.google.shopping.merchant.accounts.v1.User.Builder,
          com.google.shopping.merchant.accounts.v1.UserOrBuilder>
      userBuilder_;

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the user field is set.
   */
  public boolean hasUser() {
    return ((bitField0_ & 0x00000004) != 0);
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The user.
   */
  public com.google.shopping.merchant.accounts.v1.User getUser() {
    if (userBuilder_ == null) {
      return user_ == null
          ? com.google.shopping.merchant.accounts.v1.User.getDefaultInstance()
          : user_;
    } else {
      return userBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setUser(com.google.shopping.merchant.accounts.v1.User value) {
    if (userBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      user_ = value;
    } else {
      userBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setUser(com.google.shopping.merchant.accounts.v1.User.Builder builderForValue) {
    if (userBuilder_ == null) {
      user_ = builderForValue.build();
    } else {
      userBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder mergeUser(com.google.shopping.merchant.accounts.v1.User value) {
    if (userBuilder_ == null) {
      // Merge into an existing non-default value; otherwise just adopt the
      // incoming message wholesale.
      if (((bitField0_ & 0x00000004) != 0)
          && user_ != null
          && user_ != com.google.shopping.merchant.accounts.v1.User.getDefaultInstance()) {
        getUserBuilder().mergeFrom(value);
      } else {
        user_ = value;
      }
    } else {
      userBuilder_.mergeFrom(value);
    }
    if (user_ != null) {
      bitField0_ |= 0x00000004;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder clearUser() {
    bitField0_ = (bitField0_ & ~0x00000004);
    user_ = null;
    if (userBuilder_ != null) {
      userBuilder_.dispose();
      userBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.shopping.merchant.accounts.v1.User.Builder getUserBuilder() {
    bitField0_ |= 0x00000004;
    onChanged();
    return getUserFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.shopping.merchant.accounts.v1.UserOrBuilder getUserOrBuilder() {
    if (userBuilder_ != null) {
      return userBuilder_.getMessageOrBuilder();
    } else {
      return user_ == null
          ? com.google.shopping.merchant.accounts.v1.User.getDefaultInstance()
          : user_;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. The user to create.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.User user = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  // Lazily creates the sub-builder for the user field; once created, the
  // builder owns the value and the plain user_ field is nulled out.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.shopping.merchant.accounts.v1.User,
          com.google.shopping.merchant.accounts.v1.User.Builder,
          com.google.shopping.merchant.accounts.v1.UserOrBuilder>
      getUserFieldBuilder() {
    if (userBuilder_ == null) {
      userBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.shopping.merchant.accounts.v1.User,
              com.google.shopping.merchant.accounts.v1.User.Builder,
              com.google.shopping.merchant.accounts.v1.UserOrBuilder>(
              getUser(), getParentForChildren(), isClean());
      user_ = null;
    }
    return userBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.CreateUserRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.CreateUserRequest)
// Singleton default instance shared by all callers; protobuf messages are
// immutable, so one empty instance suffices.
private static final com.google.shopping.merchant.accounts.v1.CreateUserRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.CreateUserRequest();
}

public static com.google.shopping.merchant.accounts.v1.CreateUserRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Shared parser used by every parseFrom() overload. It delegates to the
// builder's mergeFrom loop and attaches the partially-parsed message to any
// parse failure so callers can inspect what was read before the error.
private static final com.google.protobuf.Parser<CreateUserRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateUserRequest>() {
      @java.lang.Override
      public CreateUserRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<CreateUserRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CreateUserRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.shopping.merchant.accounts.v1.CreateUserRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,920 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/UpdateConversationProfileRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/conversation_profile.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The request message for
* [ConversationProfiles.UpdateConversationProfile][google.cloud.dialogflow.v2beta1.ConversationProfiles.UpdateConversationProfile].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest}
*/
public final class UpdateConversationProfileRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest)
UpdateConversationProfileRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use UpdateConversationProfileRequest.newBuilder() to construct.
private UpdateConversationProfileRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

private UpdateConversationProfileRequest() {}

// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new UpdateConversationProfileRequest();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto
      .internal_static_google_cloud_dialogflow_v2beta1_UpdateConversationProfileRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto
      .internal_static_google_cloud_dialogflow_v2beta1_UpdateConversationProfileRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest.class,
          com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest.Builder.class);
}
// Has-bits for the two singular message fields: 0x1 = conversation_profile,
// 0x2 = update_mask.
private int bitField0_;

public static final int CONVERSATION_PROFILE_FIELD_NUMBER = 1;
private com.google.cloud.dialogflow.v2beta1.ConversationProfile conversationProfile_;

/**
 *
 *
 * <pre>
 * Required. The conversation profile to update.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the conversationProfile field is set.
 */
@java.lang.Override
public boolean hasConversationProfile() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The conversation profile to update.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The conversationProfile.
 */
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ConversationProfile getConversationProfile() {
  // Never returns null: falls back to the default instance when unset.
  return conversationProfile_ == null
      ? com.google.cloud.dialogflow.v2beta1.ConversationProfile.getDefaultInstance()
      : conversationProfile_;
}

/**
 *
 *
 * <pre>
 * Required. The conversation profile to update.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ConversationProfileOrBuilder
    getConversationProfileOrBuilder() {
  return conversationProfile_ == null
      ? com.google.cloud.dialogflow.v2beta1.ConversationProfile.getDefaultInstance()
      : conversationProfile_;
}

public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;

/**
 *
 *
 * <pre>
 * Required. The mask to control which fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The mask to control which fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The updateMask.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}

/**
 *
 *
 * <pre>
 * Required. The mask to control which fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Cached initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required (proto2-style) fields, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes only the fields whose has-bits are set, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getConversationProfile());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getUpdateMask());
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes) the serialized byte size; must mirror writeTo().
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getConversationProfile());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Structural equality: both message fields compare presence first, then value,
// and unknown fields must match as well.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest other =
      (com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest) obj;

  if (hasConversationProfile() != other.hasConversationProfile()) return false;
  if (hasConversationProfile()) {
    if (!getConversationProfile().equals(other.getConversationProfile())) return false;
  }
  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask().equals(other.getUpdateMask())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash code consistent with equals(); memoized since the message is immutable.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasConversationProfile()) {
    hash = (37 * hash) + CONVERSATION_PROFILE_FIELD_NUMBER;
    hash = (53 * hash) + getConversationProfile().hashCode();
  }
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. In-memory overloads delegate to
// PARSER; stream overloads go through GeneratedMessageV3 helpers so that real
// IOExceptions propagate unwrapped.
public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the payload, allowing
// several messages on one stream.
public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
  /** Returns a new builder initialized to the default (all-fields-unset) message. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a new builder pre-populated with a copy of {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  /**
   * Returns a builder carrying this message's current field values. The default instance skips the
   * mergeFrom copy since it has nothing set.
   */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  /**
   * Creates a builder attached to {@code parent} so nested-builder changes propagate invalidation
   * notifications up to the enclosing builder (used by the runtime for sub-message builders).
   */
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request message for
   * [ConversationProfiles.UpdateConversationProfile][google.cloud.dialogflow.v2beta1.ConversationProfiles.UpdateConversationProfile].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest)
      com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequestOrBuilder {
    /** Returns the protobuf descriptor for this message type. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto
          .internal_static_google_cloud_dialogflow_v2beta1_UpdateConversationProfileRequest_descriptor;
    }

    /** Wires the reflective field accessors to the generated message and builder classes. */
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto
          .internal_static_google_cloud_dialogflow_v2beta1_UpdateConversationProfileRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest.class,
              com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested field builders when the runtime requires it (alwaysUseFieldBuilders
    // is true only in tests/special runtimes); otherwise they are created lazily on first access.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getConversationProfileFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    // Resets all fields to unset: clears the presence bits and disposes any nested builders so
    // they detach from this parent.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      conversationProfile_ = null;
      if (conversationProfileBuilder_ != null) {
        conversationProfileBuilder_.dispose();
        conversationProfileBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto
          .internal_static_google_cloud_dialogflow_v2beta1_UpdateConversationProfileRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
          .getDefaultInstance();
    }

    // Builds the message, throwing if required fields are missing (proto3 here: always initialized).
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest build() {
      com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds the message without the initialization check; the bitField0_ != 0 test skips the
    // field-copy helper entirely when nothing has been set.
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest buildPartial() {
      com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest result =
          new com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the result. Bit 0x1 = conversation_profile, bit 0x2 = update_mask;
    // each field's value comes from its nested builder when one exists, else the raw field.
    private void buildPartial0(
        com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.conversationProfile_ =
            conversationProfileBuilder_ == null
                ? conversationProfile_
                : conversationProfileBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Type-dispatching merge: uses the typed overload for same-type messages, otherwise falls back
    // to the reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest) {
        return mergeFrom(
            (com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only the fields that are set on `other`; merging the default instance is a no-op.
    public Builder mergeFrom(
        com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest other) {
      if (other
          == com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
              .getDefaultInstance()) return this;
      if (other.hasConversationProfile()) {
        mergeConversationProfile(other.getConversationProfile());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: reads tags until EOF (tag 0) or an end-group tag; unknown fields are
    // preserved via parseUnknownField. onChanged() fires even on failure so listeners stay in sync.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (conversation_profile), length-delimited.
                input.readMessage(
                    getConversationProfileFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (update_mask), length-delimited.
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 = conversation_profile, 0x2 = update_mask.
    private int bitField0_;

    private com.google.cloud.dialogflow.v2beta1.ConversationProfile conversationProfile_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.ConversationProfile,
            com.google.cloud.dialogflow.v2beta1.ConversationProfile.Builder,
            com.google.cloud.dialogflow.v2beta1.ConversationProfileOrBuilder>
        conversationProfileBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the conversationProfile field is set.
     */
    public boolean hasConversationProfile() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The conversationProfile.
     */
    public com.google.cloud.dialogflow.v2beta1.ConversationProfile getConversationProfile() {
      if (conversationProfileBuilder_ == null) {
        return conversationProfile_ == null
            ? com.google.cloud.dialogflow.v2beta1.ConversationProfile.getDefaultInstance()
            : conversationProfile_;
      } else {
        return conversationProfileBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setConversationProfile(
        com.google.cloud.dialogflow.v2beta1.ConversationProfile value) {
      if (conversationProfileBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        conversationProfile_ = value;
      } else {
        conversationProfileBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setConversationProfile(
        com.google.cloud.dialogflow.v2beta1.ConversationProfile.Builder builderForValue) {
      if (conversationProfileBuilder_ == null) {
        conversationProfile_ = builderForValue.build();
      } else {
        conversationProfileBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeConversationProfile(
        com.google.cloud.dialogflow.v2beta1.ConversationProfile value) {
      if (conversationProfileBuilder_ == null) {
        // Field-merge semantics: if a non-default value is already set, merge into it;
        // otherwise replace wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && conversationProfile_ != null
            && conversationProfile_
                != com.google.cloud.dialogflow.v2beta1.ConversationProfile.getDefaultInstance()) {
          getConversationProfileBuilder().mergeFrom(value);
        } else {
          conversationProfile_ = value;
        }
      } else {
        conversationProfileBuilder_.mergeFrom(value);
      }
      if (conversationProfile_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearConversationProfile() {
      bitField0_ = (bitField0_ & ~0x00000001);
      conversationProfile_ = null;
      if (conversationProfileBuilder_ != null) {
        conversationProfileBuilder_.dispose();
        conversationProfileBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dialogflow.v2beta1.ConversationProfile.Builder
        getConversationProfileBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getConversationProfileFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dialogflow.v2beta1.ConversationProfileOrBuilder
        getConversationProfileOrBuilder() {
      if (conversationProfileBuilder_ != null) {
        return conversationProfileBuilder_.getMessageOrBuilder();
      } else {
        return conversationProfile_ == null
            ? com.google.cloud.dialogflow.v2beta1.ConversationProfile.getDefaultInstance()
            : conversationProfile_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation profile to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2beta1.ConversationProfile conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.ConversationProfile,
            com.google.cloud.dialogflow.v2beta1.ConversationProfile.Builder,
            com.google.cloud.dialogflow.v2beta1.ConversationProfileOrBuilder>
        getConversationProfileFieldBuilder() {
      // Lazily switches the field from plain-value storage to builder-backed storage; once the
      // builder exists, conversationProfile_ is nulled and the builder is the source of truth.
      if (conversationProfileBuilder_ == null) {
        conversationProfileBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dialogflow.v2beta1.ConversationProfile,
                com.google.cloud.dialogflow.v2beta1.ConversationProfile.Builder,
                com.google.cloud.dialogflow.v2beta1.ConversationProfileOrBuilder>(
                getConversationProfile(), getParentForChildren(), isClean());
        conversationProfile_ = null;
      }
      return conversationProfileBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Field-merge semantics: merge into an existing non-default mask, else replace.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The mask to control which fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily switches update_mask to builder-backed storage (mirrors the
      // conversation_profile field builder above).
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest)
  // Shared immutable default instance (all fields unset); also the prototype for newBuilder().
  private static final com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest();
  }

  /** Returns the singleton default (empty) instance of this message type. */
  public static com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton. Delegates to Builder.mergeFrom; on any parse failure the
  // partially-built message is attached to the thrown InvalidProtocolBufferException so callers
  // can inspect what was read before the error.
  private static final com.google.protobuf.Parser<UpdateConversationProfileRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateConversationProfileRequest>() {
        @java.lang.Override
        public UpdateConversationProfileRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<UpdateConversationProfileRequest> parser() {
    return PARSER;
  }
  /** Instance-method accessor for the shared parser (required by the Message interface). */
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateConversationProfileRequest> getParserForType() {
    return PARSER;
  }
  /** Instance-method accessor for the singleton default instance. */
  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.UpdateConversationProfileRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,150 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonSslPoliciesStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.SslPoliciesClient.AggregatedListPagedResponse;
import static com.google.cloud.compute.v1.SslPoliciesClient.ListPagedResponse;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.AggregatedListSslPoliciesRequest;
import com.google.cloud.compute.v1.DeleteSslPolicyRequest;
import com.google.cloud.compute.v1.GetSslPolicyRequest;
import com.google.cloud.compute.v1.InsertSslPolicyRequest;
import com.google.cloud.compute.v1.ListAvailableFeaturesSslPoliciesRequest;
import com.google.cloud.compute.v1.ListSslPoliciesRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.Operation.Status;
import com.google.cloud.compute.v1.PatchSslPolicyRequest;
import com.google.cloud.compute.v1.SslPoliciesAggregatedList;
import com.google.cloud.compute.v1.SslPoliciesList;
import com.google.cloud.compute.v1.SslPoliciesListAvailableFeaturesResponse;
import com.google.cloud.compute.v1.SslPolicy;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the SslPolicies service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonSslPoliciesStub extends SslPoliciesStub {
private static final TypeRegistry typeRegistry =
TypeRegistry.newBuilder().add(Operation.getDescriptor()).build();
private static final ApiMethodDescriptor<
AggregatedListSslPoliciesRequest, SslPoliciesAggregatedList>
aggregatedListMethodDescriptor =
ApiMethodDescriptor
.<AggregatedListSslPoliciesRequest, SslPoliciesAggregatedList>newBuilder()
.setFullMethodName("google.cloud.compute.v1.SslPolicies/AggregatedList")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<AggregatedListSslPoliciesRequest>newBuilder()
.setPath(
"/compute/v1/projects/{project}/aggregated/sslPolicies",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<AggregatedListSslPoliciesRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "project", request.getProject());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<AggregatedListSslPoliciesRequest> serializer =
ProtoRestSerializer.create();
if (request.hasFilter()) {
serializer.putQueryParam(fields, "filter", request.getFilter());
}
if (request.hasIncludeAllScopes()) {
serializer.putQueryParam(
fields, "includeAllScopes", request.getIncludeAllScopes());
}
if (request.hasMaxResults()) {
serializer.putQueryParam(
fields, "maxResults", request.getMaxResults());
}
if (request.hasOrderBy()) {
serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
}
if (request.hasPageToken()) {
serializer.putQueryParam(fields, "pageToken", request.getPageToken());
}
if (request.hasReturnPartialSuccess()) {
serializer.putQueryParam(
fields,
"returnPartialSuccess",
request.getReturnPartialSuccess());
}
if (request.hasServiceProjectNumber()) {
serializer.putQueryParam(
fields,
"serviceProjectNumber",
request.getServiceProjectNumber());
}
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<SslPoliciesAggregatedList>newBuilder()
.setDefaultInstance(SslPoliciesAggregatedList.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<DeleteSslPolicyRequest, Operation>
deleteMethodDescriptor =
ApiMethodDescriptor.<DeleteSslPolicyRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.compute.v1.SslPolicies/Delete")
.setHttpMethod("DELETE")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<DeleteSslPolicyRequest>newBuilder()
.setPath(
"/compute/v1/projects/{project}/global/sslPolicies/{sslPolicy}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<DeleteSslPolicyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "project", request.getProject());
serializer.putPathParam(fields, "sslPolicy", request.getSslPolicy());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<DeleteSslPolicyRequest> serializer =
ProtoRestSerializer.create();
if (request.hasRequestId()) {
serializer.putQueryParam(fields, "requestId", request.getRequestId());
}
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(DeleteSslPolicyRequest request, Operation response) -> {
StringBuilder opName = new StringBuilder(response.getName());
opName.append(":").append(request.getProject());
return HttpJsonOperationSnapshot.newBuilder()
.setName(opName.toString())
.setMetadata(response)
.setDone(Status.DONE.equals(response.getStatus()))
.setResponse(response)
.setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
.build();
})
.build();
private static final ApiMethodDescriptor<GetSslPolicyRequest, SslPolicy> getMethodDescriptor =
ApiMethodDescriptor.<GetSslPolicyRequest, SslPolicy>newBuilder()
.setFullMethodName("google.cloud.compute.v1.SslPolicies/Get")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetSslPolicyRequest>newBuilder()
.setPath(
"/compute/v1/projects/{project}/global/sslPolicies/{sslPolicy}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetSslPolicyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "project", request.getProject());
serializer.putPathParam(fields, "sslPolicy", request.getSslPolicy());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetSslPolicyRequest> serializer =
ProtoRestSerializer.create();
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<SslPolicy>newBuilder()
.setDefaultInstance(SslPolicy.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<InsertSslPolicyRequest, Operation>
insertMethodDescriptor =
ApiMethodDescriptor.<InsertSslPolicyRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.compute.v1.SslPolicies/Insert")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<InsertSslPolicyRequest>newBuilder()
.setPath(
"/compute/v1/projects/{project}/global/sslPolicies",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<InsertSslPolicyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "project", request.getProject());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<InsertSslPolicyRequest> serializer =
ProtoRestSerializer.create();
if (request.hasRequestId()) {
serializer.putQueryParam(fields, "requestId", request.getRequestId());
}
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody(
"sslPolicyResource", request.getSslPolicyResource(), false))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(InsertSslPolicyRequest request, Operation response) -> {
StringBuilder opName = new StringBuilder(response.getName());
opName.append(":").append(request.getProject());
return HttpJsonOperationSnapshot.newBuilder()
.setName(opName.toString())
.setMetadata(response)
.setDone(Status.DONE.equals(response.getStatus()))
.setResponse(response)
.setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
.build();
})
.build();
private static final ApiMethodDescriptor<ListSslPoliciesRequest, SslPoliciesList>
listMethodDescriptor =
ApiMethodDescriptor.<ListSslPoliciesRequest, SslPoliciesList>newBuilder()
.setFullMethodName("google.cloud.compute.v1.SslPolicies/List")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ListSslPoliciesRequest>newBuilder()
.setPath(
"/compute/v1/projects/{project}/global/sslPolicies",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ListSslPoliciesRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "project", request.getProject());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ListSslPoliciesRequest> serializer =
ProtoRestSerializer.create();
if (request.hasFilter()) {
serializer.putQueryParam(fields, "filter", request.getFilter());
}
if (request.hasMaxResults()) {
serializer.putQueryParam(
fields, "maxResults", request.getMaxResults());
}
if (request.hasOrderBy()) {
serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
}
if (request.hasPageToken()) {
serializer.putQueryParam(fields, "pageToken", request.getPageToken());
}
if (request.hasReturnPartialSuccess()) {
serializer.putQueryParam(
fields,
"returnPartialSuccess",
request.getReturnPartialSuccess());
}
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<SslPoliciesList>newBuilder()
.setDefaultInstance(SslPoliciesList.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
  // REST descriptor for SslPolicies.ListAvailableFeatures: a unary HTTP GET on
  // /compute/v1/projects/{project}/global/sslPolicies/listAvailableFeatures.
  // Shares the standard Compute list query parameters with List above.
  private static final ApiMethodDescriptor<
          ListAvailableFeaturesSslPoliciesRequest, SslPoliciesListAvailableFeaturesResponse>
      listAvailableFeaturesMethodDescriptor =
          ApiMethodDescriptor
              .<ListAvailableFeaturesSslPoliciesRequest, SslPoliciesListAvailableFeaturesResponse>
                  newBuilder()
              .setFullMethodName("google.cloud.compute.v1.SslPolicies/ListAvailableFeatures")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListAvailableFeaturesSslPoliciesRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/global/sslPolicies/listAvailableFeatures",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListAvailableFeaturesSslPoliciesRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListAvailableFeaturesSslPoliciesRequest>
                                serializer = ProtoRestSerializer.create();
                            // Optional list parameters are serialized only when
                            // explicitly set on the request.
                            if (request.hasFilter()) {
                              serializer.putQueryParam(fields, "filter", request.getFilter());
                            }
                            if (request.hasMaxResults()) {
                              serializer.putQueryParam(
                                  fields, "maxResults", request.getMaxResults());
                            }
                            if (request.hasOrderBy()) {
                              serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
                            }
                            if (request.hasPageToken()) {
                              serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            }
                            if (request.hasReturnPartialSuccess()) {
                              serializer.putQueryParam(
                                  fields,
                                  "returnPartialSuccess",
                                  request.getReturnPartialSuccess());
                            }
                            return fields;
                          })
                      // GET request: no body is sent.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<SslPoliciesListAvailableFeaturesResponse>newBuilder()
                      .setDefaultInstance(
                          SslPoliciesListAvailableFeaturesResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for SslPolicies.Patch: a unary HTTP PATCH on
  // /compute/v1/projects/{project}/global/sslPolicies/{sslPolicy} whose
  // SslPolicy resource travels in the request body. The RPC returns a
  // long-running Operation; see the snapshot factory below.
  private static final ApiMethodDescriptor<PatchSslPolicyRequest, Operation> patchMethodDescriptor =
      ApiMethodDescriptor.<PatchSslPolicyRequest, Operation>newBuilder()
          .setFullMethodName("google.cloud.compute.v1.SslPolicies/Patch")
          .setHttpMethod("PATCH")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<PatchSslPolicyRequest>newBuilder()
                  .setPath(
                      "/compute/v1/projects/{project}/global/sslPolicies/{sslPolicy}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<PatchSslPolicyRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "project", request.getProject());
                        serializer.putPathParam(fields, "sslPolicy", request.getSslPolicy());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<PatchSslPolicyRequest> serializer =
                            ProtoRestSerializer.create();
                        // requestId is an optional idempotency token.
                        if (request.hasRequestId()) {
                          serializer.putQueryParam(fields, "requestId", request.getRequestId());
                        }
                        return fields;
                      })
                  .setRequestBodyExtractor(
                      request ->
                          ProtoRestSerializer.create()
                              .toBody("sslPolicyResource", request.getSslPolicyResource(), false))
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Operation>newBuilder()
                  .setDefaultInstance(Operation.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .setOperationSnapshotFactory(
              (PatchSslPolicyRequest request, Operation response) -> {
                // The snapshot name is "<operationName>:<project>" so the
                // operations client can recover the routing parameters when
                // polling the global operation later.
                StringBuilder opName = new StringBuilder(response.getName());
                opName.append(":").append(request.getProject());
                return HttpJsonOperationSnapshot.newBuilder()
                    .setName(opName.toString())
                    .setMetadata(response)
                    .setDone(Status.DONE.equals(response.getStatus()))
                    .setResponse(response)
                    .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                    .build();
              })
          .build();
  // Per-RPC callables, wired up in the constructor from the descriptors
  // above. "Paged" variants add auto-pagination on top of the plain
  // callables; "Operation" variants wrap the LRO-returning RPCs.
  private final UnaryCallable<AggregatedListSslPoliciesRequest, SslPoliciesAggregatedList>
      aggregatedListCallable;
  private final UnaryCallable<AggregatedListSslPoliciesRequest, AggregatedListPagedResponse>
      aggregatedListPagedCallable;
  private final UnaryCallable<DeleteSslPolicyRequest, Operation> deleteCallable;
  private final OperationCallable<DeleteSslPolicyRequest, Operation, Operation>
      deleteOperationCallable;
  private final UnaryCallable<GetSslPolicyRequest, SslPolicy> getCallable;
  private final UnaryCallable<InsertSslPolicyRequest, Operation> insertCallable;
  private final OperationCallable<InsertSslPolicyRequest, Operation, Operation>
      insertOperationCallable;
  private final UnaryCallable<ListSslPoliciesRequest, SslPoliciesList> listCallable;
  private final UnaryCallable<ListSslPoliciesRequest, ListPagedResponse> listPagedCallable;
  private final UnaryCallable<
          ListAvailableFeaturesSslPoliciesRequest, SslPoliciesListAvailableFeaturesResponse>
      listAvailableFeaturesCallable;
  private final UnaryCallable<PatchSslPolicyRequest, Operation> patchCallable;
  private final OperationCallable<PatchSslPolicyRequest, Operation, Operation>
      patchOperationCallable;
  // Aggregates everything that must be released by close()/shutdown().
  private final BackgroundResource backgroundResources;
  // Stub used to poll the global long-running operations.
  private final HttpJsonGlobalOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;
  /** Creates a stub from the given settings with a fresh ClientContext. */
  public static final HttpJsonSslPoliciesStub create(SslPoliciesStubSettings settings)
      throws IOException {
    return new HttpJsonSslPoliciesStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default settings, bound to the given ClientContext. */
  public static final HttpJsonSslPoliciesStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonSslPoliciesStub(SslPoliciesStubSettings.newBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings and an explicit callable factory. */
  public static final HttpJsonSslPoliciesStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonSslPoliciesStub(
        SslPoliciesStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of HttpJsonSslPoliciesStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected HttpJsonSslPoliciesStub(SslPoliciesStubSettings settings, ClientContext clientContext)
      throws IOException {
    // Delegates to the full constructor with the default callable factory.
    this(settings, clientContext, new HttpJsonSslPoliciesCallableFactory());
  }
  /**
   * Constructs an instance of HttpJsonSslPoliciesStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected HttpJsonSslPoliciesStub(
      SslPoliciesStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Operations stub used to poll the global LROs produced by
    // delete/insert/patch.
    this.httpJsonOperationsStub =
        HttpJsonGlobalOperationsStub.create(clientContext, callableFactory);
    // Per-RPC transport settings. Each ParamsExtractor populates the implicit
    // routing headers from fields of the request message.
    HttpJsonCallSettings<AggregatedListSslPoliciesRequest, SslPoliciesAggregatedList>
        aggregatedListTransportSettings =
            HttpJsonCallSettings
                .<AggregatedListSslPoliciesRequest, SslPoliciesAggregatedList>newBuilder()
                .setMethodDescriptor(aggregatedListMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("project", String.valueOf(request.getProject()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<DeleteSslPolicyRequest, Operation> deleteTransportSettings =
        HttpJsonCallSettings.<DeleteSslPolicyRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("ssl_policy", String.valueOf(request.getSslPolicy()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<GetSslPolicyRequest, SslPolicy> getTransportSettings =
        HttpJsonCallSettings.<GetSslPolicyRequest, SslPolicy>newBuilder()
            .setMethodDescriptor(getMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("ssl_policy", String.valueOf(request.getSslPolicy()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<InsertSslPolicyRequest, Operation> insertTransportSettings =
        HttpJsonCallSettings.<InsertSslPolicyRequest, Operation>newBuilder()
            .setMethodDescriptor(insertMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListSslPoliciesRequest, SslPoliciesList> listTransportSettings =
        HttpJsonCallSettings.<ListSslPoliciesRequest, SslPoliciesList>newBuilder()
            .setMethodDescriptor(listMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<
            ListAvailableFeaturesSslPoliciesRequest, SslPoliciesListAvailableFeaturesResponse>
        listAvailableFeaturesTransportSettings =
            HttpJsonCallSettings
                .<ListAvailableFeaturesSslPoliciesRequest, SslPoliciesListAvailableFeaturesResponse>
                    newBuilder()
                .setMethodDescriptor(listAvailableFeaturesMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("project", String.valueOf(request.getProject()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<PatchSslPolicyRequest, Operation> patchTransportSettings =
        HttpJsonCallSettings.<PatchSslPolicyRequest, Operation>newBuilder()
            .setMethodDescriptor(patchMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("ssl_policy", String.valueOf(request.getSslPolicy()));
                  return builder.build();
                })
            .build();
    // Build the public callables from the transport settings plus the
    // retry/timeout configuration carried by `settings`. Operation callables
    // poll through httpJsonOperationsStub.
    this.aggregatedListCallable =
        callableFactory.createUnaryCallable(
            aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext);
    this.aggregatedListPagedCallable =
        callableFactory.createPagedCallable(
            aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext);
    this.deleteCallable =
        callableFactory.createUnaryCallable(
            deleteTransportSettings, settings.deleteSettings(), clientContext);
    this.deleteOperationCallable =
        callableFactory.createOperationCallable(
            deleteTransportSettings,
            settings.deleteOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getCallable =
        callableFactory.createUnaryCallable(
            getTransportSettings, settings.getSettings(), clientContext);
    this.insertCallable =
        callableFactory.createUnaryCallable(
            insertTransportSettings, settings.insertSettings(), clientContext);
    this.insertOperationCallable =
        callableFactory.createOperationCallable(
            insertTransportSettings,
            settings.insertOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.listCallable =
        callableFactory.createUnaryCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.listPagedCallable =
        callableFactory.createPagedCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.listAvailableFeaturesCallable =
        callableFactory.createUnaryCallable(
            listAvailableFeaturesTransportSettings,
            settings.listAvailableFeaturesSettings(),
            clientContext);
    this.patchCallable =
        callableFactory.createUnaryCallable(
            patchTransportSettings, settings.patchSettings(), clientContext);
    this.patchOperationCallable =
        callableFactory.createOperationCallable(
            patchTransportSettings,
            settings.patchOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(aggregatedListMethodDescriptor);
methodDescriptors.add(deleteMethodDescriptor);
methodDescriptors.add(getMethodDescriptor);
methodDescriptors.add(insertMethodDescriptor);
methodDescriptors.add(listMethodDescriptor);
methodDescriptors.add(listAvailableFeaturesMethodDescriptor);
methodDescriptors.add(patchMethodDescriptor);
return methodDescriptors;
}
  // Plain accessors for the callables created in the constructor, followed by
  // the BackgroundResource lifecycle methods, which all delegate to the
  // aggregated background resources.
  @Override
  public UnaryCallable<AggregatedListSslPoliciesRequest, SslPoliciesAggregatedList>
      aggregatedListCallable() {
    return aggregatedListCallable;
  }

  @Override
  public UnaryCallable<AggregatedListSslPoliciesRequest, AggregatedListPagedResponse>
      aggregatedListPagedCallable() {
    return aggregatedListPagedCallable;
  }

  @Override
  public UnaryCallable<DeleteSslPolicyRequest, Operation> deleteCallable() {
    return deleteCallable;
  }

  @Override
  public OperationCallable<DeleteSslPolicyRequest, Operation, Operation> deleteOperationCallable() {
    return deleteOperationCallable;
  }

  @Override
  public UnaryCallable<GetSslPolicyRequest, SslPolicy> getCallable() {
    return getCallable;
  }

  @Override
  public UnaryCallable<InsertSslPolicyRequest, Operation> insertCallable() {
    return insertCallable;
  }

  @Override
  public OperationCallable<InsertSslPolicyRequest, Operation, Operation> insertOperationCallable() {
    return insertOperationCallable;
  }

  @Override
  public UnaryCallable<ListSslPoliciesRequest, SslPoliciesList> listCallable() {
    return listCallable;
  }

  @Override
  public UnaryCallable<ListSslPoliciesRequest, ListPagedResponse> listPagedCallable() {
    return listPagedCallable;
  }

  @Override
  public UnaryCallable<
          ListAvailableFeaturesSslPoliciesRequest, SslPoliciesListAvailableFeaturesResponse>
      listAvailableFeaturesCallable() {
    return listAvailableFeaturesCallable;
  }

  @Override
  public UnaryCallable<PatchSslPolicyRequest, Operation> patchCallable() {
    return patchCallable;
  }

  @Override
  public OperationCallable<PatchSslPolicyRequest, Operation, Operation> patchOperationCallable() {
    return patchOperationCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked failures unchanged.
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
openjdk/jdk8 | 35,793 | jdk/test/sun/management/jmxremote/bootstrap/RmiBootstrapTest.java | /*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import sun.management.jmxremote.ConnectorBootstrap;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.BindException;
import java.net.ServerSocket;
import java.rmi.server.ExportException;
import java.util.Properties;
import java.util.Iterator;
import java.util.Set;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Enumeration;
import javax.management.remote.*;
import javax.management.*;
import sun.management.AgentConfigurationError;
import util.TestLogger;
/**
* <p>This class implements unit test for RMI Bootstrap.
* When called with no arguments main() looks in the directory indicated
* by the "test.src" system property for files called management*ok.properties
* or management*ko.properties. The *ok.properties files are assumed to be
* valid Java M&M config files for which the bootstrap should succeed.
* The *ko.properties files are assumed to be configurations for which the
* bootstrap & connection test will fail.</p>
*
* <p>The rmi port number can be specified with the "rmi.port" system property.
* If not, this test will use the first available port</p>
*
 * <p>When called with some argument, the main() will interpret its args to
 * be Java M&M configuration file names. The filenames are expected to end
 * with ok.properties or ko.properties - and are interpreted as above.</p>
*
 * <p>Note that a limitation of the RMI registry (bug 4267864) prevents
 * this test from succeeding if more than 1 configuration is used.
 * As long as 4267864 isn't fixed, this test must be called as many times
 * as needed but with a single argument (no arguments, or several arguments
 * will fail).</p>
*
* <p>Debug traces are logged in "sun.management.test"</p>
**/
public class RmiBootstrapTest {
    // the number of consecutive ports to test for availability
    private static final int PORT_TEST_LEN = 800;
    // Test-wide logger; traces go to the "sun.management.test" logger.
    static TestLogger log =
        new TestLogger("RmiBootstrapTest");
    /**
     * When launching several registries, we increment the port number
     * to avoid falling into "port number already in use" problems.
     **/
    static int testPort = 0;
    // First port probed; testPort grows from here up to basePort+PORT_TEST_LEN.
    static int basePort = 0;
    /**
     * Default values for RMI configuration properties.
     * These mirror the out-of-the-box defaults of the JMX agent, except for
     * the store names/passwords, which are test fixtures.
     **/
    public static interface DefaultValues {
        public static final String PORT="0";
        public static final String CONFIG_FILE_NAME="management.properties";
        public static final String USE_SSL="true";
        public static final String USE_AUTHENTICATION="true";
        public static final String PASSWORD_FILE_NAME="jmxremote.password";
        public static final String ACCESS_FILE_NAME="jmxremote.access";
        // Store basenames are resolved against ${test.src}/ssl by
        // getDefaultStoreName().
        public static final String KEYSTORE="keystore";
        public static final String KEYSTORE_PASSWD="password";
        public static final String TRUSTSTORE="truststore";
        public static final String TRUSTSTORE_PASSWD="trustword";
        public static final String SSL_NEED_CLIENT_AUTH="false";
    }
    /**
     * Names of RMI configuration properties.
     * Mostly com.sun.management.* agent properties, plus the
     * javax.net.ssl.* store properties and the JMX connector
     * credentials attribute.
     **/
    public static interface PropertyNames {
        public static final String PORT=
            "com.sun.management.jmxremote.port";
        public static final String CONFIG_FILE_NAME=
            "com.sun.management.config.file";
        public static final String USE_SSL=
            "com.sun.management.jmxremote.ssl";
        public static final String USE_AUTHENTICATION=
            "com.sun.management.jmxremote.authenticate";
        public static final String PASSWORD_FILE_NAME=
            "com.sun.management.jmxremote.password.file";
        public static final String ACCESS_FILE_NAME=
            "com.sun.management.jmxremote.access.file";
        public static final String INSTRUMENT_ALL=
            "com.sun.management.instrumentall";
        // Key of the credentials entry in the JMXConnector environment map.
        public static final String CREDENTIALS =
            "jmx.remote.credentials";
        public static final String KEYSTORE=
            "javax.net.ssl.keyStore";
        public static final String KEYSTORE_PASSWD=
            "javax.net.ssl.keyStorePassword";
        public static final String TRUSTSTORE=
            "javax.net.ssl.trustStore";
        public static final String TRUSTSTORE_PASSWD=
            "javax.net.ssl.trustStorePassword";
        public static final String SSL_ENABLED_CIPHER_SUITES =
            "com.sun.management.jmxremote.ssl.enabled.cipher.suites";
        public static final String SSL_ENABLED_PROTOCOLS =
            "com.sun.management.jmxremote.ssl.enabled.protocols";
        public static final String SSL_NEED_CLIENT_AUTH =
            "com.sun.management.jmxremote.ssl.need.client.auth";
    }
/**
* A filter to find all filenames who match <prefix>*<suffix>.
* Note that <prefix> and <suffix> can overlap.
**/
private static class ConfigFilenameFilter implements FilenameFilter {
final String suffix;
final String prefix;
ConfigFilenameFilter(String prefix, String suffix) {
this.suffix=suffix;
this.prefix=prefix;
}
public boolean accept(File dir, String name) {
return (name.startsWith(prefix) && name.endsWith(suffix));
}
}
/**
* Get all "management*ok.properties" files in the directory
* indicated by the "test.src" management property.
**/
private static File[] findConfigurationFilesOk() {
final String testSrc = System.getProperty("test.src");
final File dir = new File(testSrc);
final FilenameFilter filter =
new ConfigFilenameFilter("management_test","ok.properties");
return dir.listFiles(filter);
}
/**
* Get all "management*ko.properties" files in the directory
* indicated by the "test.src" management property.
**/
private static File[] findConfigurationFilesKo() {
final String testSrc = System.getProperty("test.src");
final File dir = new File(testSrc);
final FilenameFilter filter =
new ConfigFilenameFilter("management_test","ko.properties");
return dir.listFiles(filter);
}
    /**
     * List all MBeans and their attributes. Used to test communication
     * with the Java M&M MBean Server.
     *
     * @param server connection to the MBean server under test.
     * @return the number of queried MBeans.
     * @throws IOException if an MBean or one of its readable attributes
     *         cannot be retrieved.
     */
    public static int listMBeans(MBeanServerConnection server)
        throws IOException {
        // Null pattern/query: match every registered MBean.
        return listMBeans(server,null,null);
    }
/**
* List all matching MBeans and their attributes.
* Used to test communication with the Java M&M MBean Server.
* @return the number of matching MBeans.
*/
public static int listMBeans(MBeanServerConnection server,
ObjectName pattern, QueryExp query)
throws IOException {
final Set names = server.queryNames(pattern,query);
for (final Iterator i=names.iterator(); i.hasNext(); ) {
ObjectName name = (ObjectName)i.next();
log.trace("listMBeans","Got MBean: "+name);
try {
MBeanInfo info =
server.getMBeanInfo((ObjectName)name);
MBeanAttributeInfo[] attrs = info.getAttributes();
if (attrs == null) continue;
for (int j=0; j<attrs.length; j++) {
if (attrs[j].isReadable()) {
try {
Object o =
server.getAttribute(name,attrs[j].getName());
if (log.isDebugOn())
log.debug("listMBeans","\t\t" +
attrs[j].getName() +
" = "+o);
} catch (Exception x) {
log.trace("listMBeans","JmxClient failed to get " +
attrs[j].getName() + ": " + x);
final IOException io =
new IOException("JmxClient failed to get " +
attrs[j].getName());
io.initCause(x);
throw io;
}
}
}
} catch (Exception x) {
log.trace("listMBeans",
"JmxClient failed to get MBeanInfo: " + x);
final IOException io =
new IOException("JmxClient failed to get MBeanInfo: "+x);
io.initCause(x);
throw io;
}
}
return names.size();
}
/**
* Compute the full path name for a default file.
* @param basename basename (with extension) of the default file.
* @return ${JRE}/lib/management/${basename}
**/
private static String getDefaultFileName(String basename) {
final String fileSeparator = File.separator;
final StringBuffer defaultFileName =
new StringBuffer(System.getProperty("java.home")).
append(fileSeparator).append("lib").append(fileSeparator).
append("management").append(fileSeparator).
append(basename);
return defaultFileName.toString();
}
/**
* Compute the full path name for a default file.
* @param basename basename (with extension) of the default file.
* @return ${JRE}/lib/management/${basename}
**/
private static String getDefaultStoreName(String basename) {
final String fileSeparator = File.separator;
final StringBuffer defaultFileName =
new StringBuffer(System.getProperty("test.src")).
append(fileSeparator).append("ssl").append(fileSeparator).
append(basename);
return defaultFileName.toString();
}
/**
* Parses the password file to read the credentials.
* Returns an ArrayList of arrays of 2 string:
* {<subject>, <password>}.
* If the password file does not exists, return an empty list.
* (File not found = empty file).
**/
private ArrayList readCredentials(String passwordFileName)
throws IOException {
final Properties pws = new Properties();
final ArrayList result = new ArrayList();
final File f = new File(passwordFileName);
if (!f.exists()) return result;
FileInputStream fin = new FileInputStream(passwordFileName);
try {pws.load(fin);}finally{fin.close();}
for (Enumeration en=pws.propertyNames();en.hasMoreElements();) {
final String[] cred = new String[2];
cred[0]=(String)en.nextElement();
cred[1]=pws.getProperty(cred[0]);
result.add(cred);
}
return result;
}
    /**
     * Connect with the given url, using all given credentials in turn.
     * A null entry in the useCredentials arrays indicate a connection
     * where no credentials are used.
     * @param url JMXServiceURL of the server.
     * @param useCredentials An array of credentials (a credential
     *        is a two String array, so this is an array of arrays
     *        of strings:
     *        useCredentials[i][0]=subject
     *        useCredentials[i][1]=password
     *        if useCredentials[i] == null means no credentials.
     * @param expectConnectOk true if connection is expected to succeed
     *        Note: if expectConnectOk=false and the test fails to connect
     *              the number of failure is not incremented. Conversely,
     *              if expectConnectOk=false and the test does not fail to
     *              connect the number of failure is incremented.
     * @param expectReadOk true if communication (listMBeans) is expected
     *        to succeed.
     *        Note: if expectReadOk=false and the test fails to read MBeans
     *              the number of failure is not incremented. Conversely,
     *              if expectReadOk=false and the test does not fail to
     *              read MBeans the number of failure is incremented.
     * @return number of failure.
     **/
    public int connectAndRead(JMXServiceURL url,
                              Object[] useCredentials,
                              boolean expectConnectOk,
                              boolean expectReadOk)
        throws IOException {
        int errorCount = 0;
        // Try each credential set in turn. A failure only counts as an
        // error when it contradicts the expectConnectOk/expectReadOk
        // expectations (see the javadoc above).
        for (int i=0 ; i<useCredentials.length ; i++) {
            final Map m = new HashMap();
            final String[] credentials = (String[])useCredentials[i];
            final String crinfo;
            if (credentials != null) {
                crinfo = "{"+credentials[0] + ", " + credentials[1] + "}";
                // Credentials travel in the connector environment map.
                m.put(PropertyNames.CREDENTIALS,credentials);
            } else {
                crinfo="no credentials";
            }
            log.trace("testCommunication","using credentials: " + crinfo);
            final JMXConnector c;
            // Phase 1: connect. Whether a failure here is an error depends
            // on expectConnectOk.
            try {
                c = JMXConnectorFactory.connect(url,m);
            } catch (IOException x ) {
                if (expectConnectOk) {
                    final String err = "Connection failed for " + crinfo +
                        ": " + x;
                    System.out.println(err);
                    log.trace("testCommunication",err);
                    log.debug("testCommunication",x);
                    errorCount++;
                    continue;
                } else {
                    System.out.println("Connection failed as expected for " +
                                       crinfo + ": " + x);
                    continue;
                }
            } catch (RuntimeException x ) {
                if (expectConnectOk) {
                    final String err = "Connection failed for " + crinfo +
                        ": " + x;
                    System.out.println(err);
                    log.trace("testCommunication",err);
                    log.debug("testCommunication",x);
                    errorCount++;
                    continue;
                } else {
                    System.out.println("Connection failed as expected for " +
                                       crinfo + ": " + x);
                    continue;
                }
            }
            // Phase 2: read some MBeans over the connection. The connector
            // is always closed in the finally clause below.
            try {
                MBeanServerConnection conn =
                    c.getMBeanServerConnection();
                if (log.isDebugOn()) {
                    log.debug("testCommunication","Connection is:" + conn);
                    log.debug("testCommunication","Server domain is: " +
                              conn.getDefaultDomain());
                }
                final ObjectName pattern =
                    new ObjectName("java.lang:type=Memory,*");
                final int count = listMBeans(conn,pattern,null);
                // At least the Memory MXBean must match the pattern.
                if (count == 0)
                    throw new Exception("Expected at least one matching "+
                                        "MBean for "+pattern);
                if (expectReadOk) {
                    System.out.println("Communication succeeded " +
                                       "as expected for "+
                                       crinfo + ": found " + count
                                       + ((count<2)?"MBean":"MBeans"));
                } else {
                    final String err = "Expected failure didn't occur for " +
                        crinfo;
                    System.out.println(err);
                    errorCount++;
                }
            } catch (IOException x ) {
                if (expectReadOk) {
                    final String err = "Communication failed with " + crinfo +
                        ": " + x;
                    System.out.println(err);
                    log.trace("testCommunication",err);
                    log.debug("testCommunication",x);
                    errorCount++;
                    continue;
                } else {
                    System.out.println("Communication failed as expected for "+
                                       crinfo + ": " + x);
                    continue;
                }
            } catch (RuntimeException x ) {
                if (expectReadOk) {
                    final String err = "Communication failed with " + crinfo +
                        ": " + x;
                    System.out.println(err);
                    log.trace("testCommunication",err);
                    log.debug("testCommunication",x);
                    errorCount++;
                    continue;
                } else {
                    System.out.println("Communication failed as expected for "+
                                       crinfo + ": " + x);
                }
            } catch (Exception x) {
                // Any other failure (e.g. the zero-match check above) is
                // always an error, whatever the expectations.
                final String err = "Failed to read MBeans with " + crinfo +
                    ": " + x;
                System.out.println(err);
                log.trace("testCommunication",err);
                log.debug("testCommunication",x);
                errorCount++;
                continue;
            } finally {
                c.close();
            }
        }
        return errorCount;
    }
private void setSslProperties() {
final String defaultKeyStore =
getDefaultStoreName(DefaultValues.KEYSTORE);
final String defaultTrustStore =
getDefaultStoreName(DefaultValues.TRUSTSTORE);
final String keyStore =
System.getProperty(PropertyNames.KEYSTORE, defaultKeyStore);
System.setProperty(PropertyNames.KEYSTORE,keyStore);
log.trace("setSslProperties",PropertyNames.KEYSTORE+"="+keyStore);
final String password =
System.getProperty(PropertyNames.KEYSTORE_PASSWD,
DefaultValues.KEYSTORE_PASSWD);
System.setProperty(PropertyNames.KEYSTORE_PASSWD,password);
log.trace("setSslProperties",
PropertyNames.KEYSTORE_PASSWD+"="+password);
final String trustStore =
System.getProperty(PropertyNames.TRUSTSTORE,
defaultTrustStore);
System.setProperty(PropertyNames.TRUSTSTORE,trustStore);
log.trace("setSslProperties",
PropertyNames.TRUSTSTORE+"="+trustStore);
final String trustword =
System.getProperty(PropertyNames.TRUSTSTORE_PASSWD,
DefaultValues.TRUSTSTORE_PASSWD);
System.setProperty(PropertyNames.TRUSTSTORE_PASSWD,trustword);
log.trace("setSslProperties",
PropertyNames.TRUSTSTORE_PASSWD+"="+trustword);
}
    /**
     * Reads the management config file (system property
     * com.sun.management.config.file, defaulting to
     * ${JRE}/lib/management/management.properties) and, when SSL is
     * enabled there, propagates the test keystore/truststore settings to
     * system properties via setSslProperties().
     * Failures are logged but never propagated.
     */
    private void checkSslConfiguration() {
        try {
            final String defaultConf =
                getDefaultFileName(DefaultValues.CONFIG_FILE_NAME);
            final String confname =
                System.getProperty(PropertyNames.CONFIG_FILE_NAME,defaultConf);
            final Properties props = new Properties();
            final File conf = new File(confname);
            // A missing config file simply leaves `props` empty, so the
            // defaults below apply.
            if (conf.exists()) {
                FileInputStream fin = new FileInputStream(conf);
                try {props.load(fin);} finally {fin.close();}
            }
            // Do we use SSL?
            final String useSslStr =
                props.getProperty(PropertyNames.USE_SSL,
                                  DefaultValues.USE_SSL);
            final boolean useSsl =
                Boolean.valueOf(useSslStr).booleanValue();
            log.debug("checkSslConfiguration",
                      PropertyNames.USE_SSL+"="+useSsl+
                      ": setting SSL");
            // Do we use SSL client authentication?
            final String useSslClientAuthStr =
                props.getProperty(PropertyNames.SSL_NEED_CLIENT_AUTH,
                                  DefaultValues.SSL_NEED_CLIENT_AUTH);
            final boolean useSslClientAuth =
                Boolean.valueOf(useSslClientAuthStr).booleanValue();
            log.debug("checkSslConfiguration",
                      PropertyNames.SSL_NEED_CLIENT_AUTH+"="+useSslClientAuth);
            // Do we use customized SSL cipher suites?
            // (Logged for diagnostics only; not otherwise used here.)
            final String sslCipherSuites =
                props.getProperty(PropertyNames.SSL_ENABLED_CIPHER_SUITES);
            log.debug("checkSslConfiguration",
                      PropertyNames.SSL_ENABLED_CIPHER_SUITES + "=" +
                      sslCipherSuites);
            // Do we use customized SSL protocols?
            // (Logged for diagnostics only; not otherwise used here.)
            final String sslProtocols =
                props.getProperty(PropertyNames.SSL_ENABLED_PROTOCOLS);
            log.debug("checkSslConfiguration",
                      PropertyNames.SSL_ENABLED_PROTOCOLS + "=" +
                      sslProtocols);
            if (useSsl) setSslProperties();
        } catch (Exception x) {
            System.out.println("Failed to setup SSL configuration: " + x);
            log.debug("checkSslConfiguration",x);
        }
    }
/**
* Tests the server bootstraped at the given URL.
* Uses the system properties to determine which config file is used.
* Loads the config file to determine which password file is used.
     * Loads the password file to find out which credentials to use.
* Also checks that unregistered user/passwords are not allowed to
* connect when a password file is used.
*
* This method calls connectAndRead().
**/
public void testCommunication(JMXServiceURL url)
throws IOException {
final String defaultConf =
getDefaultFileName(DefaultValues.CONFIG_FILE_NAME);
final String confname =
System.getProperty(PropertyNames.CONFIG_FILE_NAME,defaultConf);
final Properties props = new Properties();
final File conf = new File(confname);
if (conf.exists()) {
FileInputStream fin = new FileInputStream(conf);
try {props.load(fin);} finally {fin.close();}
}
// Do we use authentication?
final String useAuthenticationStr =
props.getProperty(PropertyNames.USE_AUTHENTICATION,
DefaultValues.USE_AUTHENTICATION);
final boolean useAuthentication =
Boolean.valueOf(useAuthenticationStr).booleanValue();
// Get Password File
final String defaultPasswordFileName = Utils.convertPath(
getDefaultFileName(DefaultValues.PASSWORD_FILE_NAME));
final String passwordFileName = Utils.convertPath(
props.getProperty(PropertyNames.PASSWORD_FILE_NAME,
defaultPasswordFileName));
// Get Access File
final String defaultAccessFileName = Utils.convertPath(
getDefaultFileName(DefaultValues.ACCESS_FILE_NAME));
final String accessFileName = Utils.convertPath(
props.getProperty(PropertyNames.ACCESS_FILE_NAME,
defaultAccessFileName));
if (useAuthentication) {
System.out.println("PasswordFileName: " + passwordFileName);
System.out.println("accessFileName: " + accessFileName);
}
final Object[] allCredentials;
final Object[] noCredentials = { null };
if (useAuthentication) {
final ArrayList l = readCredentials(passwordFileName);
if (l.size() == 0) allCredentials = null;
else allCredentials = l.toArray();
} else allCredentials = noCredentials;
int errorCount = 0;
if (allCredentials!=null) {
// Tests that the registered user/passwords are allowed to
// connect & read
//
errorCount += connectAndRead(url,allCredentials,true,true);
} else {
// Tests that no one is allowed
// connect & read
//
final String[][] someCredentials = {
null,
{ "modify", "R&D" },
{ "measure", "QED" }
};
errorCount += connectAndRead(url,someCredentials,false,false);
}
if (useAuthentication && allCredentials != noCredentials) {
// Tests that the registered user/passwords are not allowed to
// connect & read
//
final String[][] badCredentials = {
{ "bad.user", "R&D" },
{ "measure", "bad.password" }
};
errorCount += connectAndRead(url,badCredentials,false,false);
}
if (errorCount > 0) {
final String err = "Test " + confname + " failed with " +
errorCount + " error(s)";
log.debug("testCommunication",err);
throw new RuntimeException(err);
}
}
    /**
     * Test the configuration indicated by `file'.
     * Sets the appropriate System properties for config file and
     * port and then calls ConnectorBootstrap.initialize().
     * eventually cleans up by calling ConnectorBootstrap.terminate().
     * @param file configuration file to test; null means the default
     *        configuration.
     * @param port RMI port to bootstrap the connector on.
     * @return null if the test succeeds, an error message otherwise.
     * @throws BindException if the port is already in use, so that the
     *         caller can retry with another port.
     **/
    private String testConfiguration(File file,int port) throws BindException {
        final String path;
        try {
            path=(file==null)?null:file.getCanonicalPath();
        } catch(IOException x) {
            final String err = "Failed to test configuration " + file +
                ": " + x;
            log.trace("testConfiguration",err);
            log.debug("testConfiguration",x);
            return err;
        }
        final String config = (path==null)?"Default config file":path;
        System.out.println("***");
        System.out.println("*** Testing configuration (port=" + port + "): "
                           + path);
        System.out.println("***");
        // Point the agent at the requested port/config via the standard
        // com.sun.management.* system properties; remove the config-file
        // property entirely when testing the default configuration.
        System.setProperty("com.sun.management.jmxremote.port",
                           Integer.toString(port));
        if (path != null)
            System.setProperty("com.sun.management.config.file", path);
        else
            System.getProperties().remove("com.sun.management.config.file");
        log.trace("testConfiguration","com.sun.management.jmxremote.port="+port);
        if (path != null && log.isDebugOn())
            log.trace("testConfiguration",
                      "com.sun.management.config.file="+path);
        checkSslConfiguration();
        final JMXConnectorServer cs;
        try {
            cs = ConnectorBootstrap.initialize();
        } catch (AgentConfigurationError x) {
            // Unwrap a port conflict (BindException wrapped in an
            // ExportException) so the caller can retry on another port;
            // any other bootstrap failure is reported as an error.
            if (x.getCause() instanceof ExportException) {
                if (x.getCause().getCause() instanceof BindException) {
                    throw (BindException)x.getCause().getCause();
                }
            }
            final String err = "Failed to initialize connector:" +
                "\n\tcom.sun.management.jmxremote.port=" + port +
                ((path!=null)?"\n\tcom.sun.management.config.file="+path:
                 "\n\t"+config) +
                "\n\tError is: " + x;
            log.trace("testConfiguration",err);
            log.debug("testConfiguration",x);
            return err;
        } catch (Exception x) {
            log.debug("testConfiguration",x);
            return x.toString();
        }
        try {
            // Connect through the RMI registry the bootstrap exported.
            JMXServiceURL url =
                new JMXServiceURL("rmi",null,0,"/jndi/rmi://localhost:"+
                                  port+"/jmxrmi");
            try {
                testCommunication(url);
            } catch (Exception x) {
                final String err = "Failed to connect to agent {url="+url+
                    "}: " + x;
                log.trace("testConfiguration",err);
                log.debug("testConfiguration",x);
                return err;
            }
        } catch (Exception x) {
            final String err = "Failed to test configuration "+config+
                ": "+x;
            log.trace("testConfiguration",err);
            log.debug("testConfiguration",x);
            return err;
        } finally {
            // Always stop the connector server so the next test can
            // rebind; failure to stop is logged but not fatal.
            try {
                cs.stop();
            } catch (Exception x) {
                final String err = "Failed to terminate: "+x;
                log.trace("testConfiguration",err);
                log.debug("testConfiguration",x);
            }
        }
        System.out.println("Configuration " + config + " successfully tested");
        return null;
    }
/**
* Test a configuration file which should make the bootstrap fail.
* The test is assumed to have succeeded if the bootstrap fails.
* @return null if the test succeeds, an error message otherwise.
**/
private String testConfigurationKo(File conf,int port) {
String errStr = null;
for (int i = 0; i < PORT_TEST_LEN; i++) {
try {
errStr = testConfiguration(conf,port+testPort++);
break;
} catch (BindException e) {
// port conflict; try another port
}
}
if (errStr == null) {
return "Configuration " +
conf + " should have failed!";
}
System.out.println("Configuration " +
conf + " failed as expected");
log.debug("runko","Error was: " + errStr);
return null;
}
/**
* Test a configuration file. Determines whether the bootstrap
* should succeed or fail depending on the file name:
* *ok.properties: bootstrap should succeed.
* *ko.properties: bootstrap or connection should fail.
* @return null if the test succeeds, an error message otherwise.
**/
private String testConfigurationFile(String fileName) {
File file = new File(fileName);
final String portStr = System.getProperty("rmi.port",null);
final int port = portStr != null ?
Integer.parseInt(portStr) : basePort;
if (fileName.endsWith("ok.properties")) {
String errStr = null;
for (int i = 0; i < PORT_TEST_LEN; i++) {
try {
errStr = testConfiguration(file,port+testPort++);
return errStr;
} catch (BindException e) {
// port conflict; try another port
}
}
return "Can not locate available port";
}
if (fileName.endsWith("ko.properties")) {
return testConfigurationKo(file,port+testPort++);
}
return fileName +
": test file suffix must be one of [ko|ok].properties";
}
/**
* Find all *ko.property files and test them.
* (see findConfigurationFilesKo() and testConfigurationKo())
* @throws RuntimeException if the test fails.
**/
public void runko() {
final String portStr = System.getProperty("rmi.port",null);
final int port = portStr != null ?
Integer.parseInt(portStr) : basePort;
final File[] conf = findConfigurationFilesKo();
if ((conf == null)||(conf.length == 0))
throw new RuntimeException("No configuration found");
String errStr;
for (int i=0;i<conf.length;i++) {
errStr = testConfigurationKo(conf[i],port+testPort++);
if (errStr != null) {
throw new RuntimeException(errStr);
}
}
}
/**
* Find all *ok.property files and test them.
* (see findConfigurationFilesOk() and testConfiguration())
* @throws RuntimeException if the test fails.
**/
public void runok() {
final String portStr = System.getProperty("rmi.port",null);
final int port = portStr != null ?
Integer.parseInt(portStr) : basePort;
final File[] conf = findConfigurationFilesOk();
if ((conf == null)||(conf.length == 0))
throw new RuntimeException("No configuration found");
String errStr = null;
for (int i=0;i<conf.length;i++) {
for (int j = 0; j < PORT_TEST_LEN; i++) {
try {
errStr = testConfiguration(conf[i],port+testPort++);
break;
} catch (BindException e) {
// port conflict; try another port
}
}
if (errStr != null) {
throw new RuntimeException(errStr);
}
}
// FIXME: No jmxremote.password is not installed in JRE by default.
// - disable the following test case.
//
// Test default config
//
// errStr = testConfiguration(null,port+testPort++);
// if (errStr != null) {
// throw new RuntimeException(errStr);
// }
}
    /**
     * Finds all configuration files (*ok.properties and *ko.properties)
     * and tests them.
     * (see runko() and runok()).
     * The positive tests run first, then the negative ones.
     * @throws RuntimeException if the test fails.
     **/
    public void run() {
        runok();
        runko();
    }
/**
* Tests the specified configuration files.
* If args[] is not empty, each element in args[] is expected to be
* a filename ending either by ok.properties or ko.properties.
* Otherwise, the configuration files will be automatically determined
* by looking at all *.properties files located in the directory
* indicated by the System property "test.src".
* @throws RuntimeException if the test fails.
**/
public void run(String args[]) {
if (args.length == 0) {
run() ; return;
}
for (int i=0; i<args.length; i++) {
final String errStr =testConfigurationFile(args[i]);
if (errStr != null) {
throw new RuntimeException(errStr);
}
}
}
    /**
     * Calls run(args[]).
     * exit(1) if the test fails.
     * @param args configuration file names to test; empty means
     *        auto-discover (see run(String[])).
     **/
    public static void main(String args[]) throws Exception {
        setupBasePort();
        RmiBootstrapTest manager = new RmiBootstrapTest();
        try {
            manager.run(args);
        } catch (RuntimeException r) {
            // Expected (asserted) test failures exit with code 1.
            System.out.println("Test Failed: "+ r.getMessage());
            System.exit(1);
        } catch (Throwable t) {
            // Unexpected errors get a stack trace and exit code 2.
            System.out.println("Test Failed: "+ t);
            t.printStackTrace();
            System.exit(2);
        }
        System.out.println("**** Test RmiBootstrap Passed ****");
    }
private static void setupBasePort() throws IOException {
try (ServerSocket s = new ServerSocket(0)) {
basePort = s.getLocalPort() + 1;
}
}
}
|
apache/ranger | 36,157 | security-admin/src/main/java/org/apache/ranger/patch/PatchForUpdatingTagsJson_J10020.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Consolidates Ranger policy details into a JSON string and stores it into a
* column in x_policy table After running this patch Ranger policy can be
* completely read/saved into x_policy table and some related Ref tables (which
* maintain ID->String mapping for each policy).
*
*/
package org.apache.ranger.patch;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.ranger.authorization.utils.JsonUtils;
import org.apache.ranger.authorization.utils.StringUtil;
import org.apache.ranger.biz.ServiceDBStore;
import org.apache.ranger.biz.TagDBStore;
import org.apache.ranger.db.RangerDaoManager;
import org.apache.ranger.db.XXServiceResourceDao;
import org.apache.ranger.db.XXTagDao;
import org.apache.ranger.db.XXTagDefDao;
import org.apache.ranger.entity.XXPortalUser;
import org.apache.ranger.entity.XXResourceDef;
import org.apache.ranger.entity.XXService;
import org.apache.ranger.entity.XXServiceResource;
import org.apache.ranger.entity.XXServiceResourceElement;
import org.apache.ranger.entity.XXServiceResourceElementValue;
import org.apache.ranger.entity.XXTag;
import org.apache.ranger.entity.XXTagAttribute;
import org.apache.ranger.entity.XXTagAttributeDef;
import org.apache.ranger.entity.XXTagDef;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerService;
import org.apache.ranger.plugin.model.RangerServiceResource;
import org.apache.ranger.plugin.model.RangerTag;
import org.apache.ranger.plugin.model.RangerTagDef;
import org.apache.ranger.plugin.model.RangerValiditySchedule;
import org.apache.ranger.plugin.util.SearchFilter;
import org.apache.ranger.service.RangerServiceResourceService;
import org.apache.ranger.service.RangerTagDefService;
import org.apache.ranger.service.RangerTagService;
import org.apache.ranger.util.CLIUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
@Component
public class PatchForUpdatingTagsJson_J10020 extends BaseLoader {
    private static final Logger logger = LoggerFactory.getLogger(PatchForUpdatingTagsJson_J10020.class);

    // DAO manager: direct access to the Ranger x_* tables.
    @Autowired
    RangerDaoManager daoMgr;

    // Service store, used to enumerate all Ranger services.
    @Autowired
    ServiceDBStore svcStore;

    // Tag store, used to refresh a service-resource after porting it.
    @Autowired
    TagDBStore tagStore;

    // Transaction manager; each row is ported in its own transaction.
    @Autowired
    @Qualifier(value = "transactionManager")
    PlatformTransactionManager txManager;

    // Services used to re-save tag-defs/tags/service-resources, which
    // populates their JSON columns.
    @Autowired
    RangerTagDefService tagDefService;

    @Autowired
    RangerTagService tagService;

    @Autowired
    RangerServiceResourceService serviceResourceService;
public static void main(String[] args) {
logger.info("main()");
try {
PatchForUpdatingTagsJson_J10020 loader = (PatchForUpdatingTagsJson_J10020) CLIUtil.getBean(PatchForUpdatingTagsJson_J10020.class);
loader.init();
while (loader.isMoreToProcess()) {
loader.load();
}
logger.info("Load complete. Exiting!!!");
System.exit(0);
} catch (Exception e) {
logger.error("Error loading", e);
System.exit(1);
}
}
    /**
     * No-op: this patch needs no initialization before load().
     */
    @Override
    public void init() throws Exception {
        // Do Nothing
    }

    /**
     * Logs a one-line description of what this patch does.
     */
    @Override
    public void printStats() {
        logger.info("Update Ranger Tags Tables with Json data ");
    }

    /**
     * Entry point invoked by the patch framework: consolidates tag data
     * into the JSON columns. Exits the JVM with status 1 on failure.
     */
    @Override
    public void execLoad() {
        logger.info("==> PatchForUpdatingTagsJson.execLoad()");

        try {
            updateRangerTagsTablesWithTagsJson();
        } catch (Exception e) {
            logger.error("Error while UpdateRangerTagsTablesWithTagsJson()", e);
            System.exit(1);
        }

        logger.info("<== PatchForUpdatingTagsJson.execLoad()");
    }
    /**
     * For every Ranger service, loads its tag-definitions, tags and
     * service-resources, and rewrites each row whose JSON column is still
     * empty, using a dedicated worker thread (and transaction) per row.
     * Re-runs are idempotent: already-ported rows are skipped.
     *
     * @throws Exception on the first row that fails to port; this aborts
     *         the whole patch.
     */
    private void updateRangerTagsTablesWithTagsJson() throws Exception {
        logger.info("==> updateRangerTagsTablesWithTagsJson() ");

        List<RangerService> allServices = svcStore.getServices(new SearchFilter());

        if (CollectionUtils.isNotEmpty(allServices)) {
            TransactionTemplate txTemplate = new TransactionTemplate(txManager);

            for (RangerService service : allServices) {
                XXService dbService = daoMgr.getXXService().getById(service.getId());
                // Loads the pre-JSON representation of all tag objects
                // for this service (in its own read-only transaction).
                RangerTagDBRetriever tagsRetriever = new RangerTagDBRetriever(daoMgr, txManager, dbService);

                Map<Long, RangerTagDef> tagDefs = tagsRetriever.getTagDefs();
                Map<Long, RangerTag> tags = tagsRetriever.getTags();
                List<RangerServiceResource> serviceResources = tagsRetriever.getServiceResources();

                XXTagDefDao tagDefDao = daoMgr.getXXTagDef();
                XXTagDao tagDao = daoMgr.getXXTag();
                XXServiceResourceDao serviceResourceDao = daoMgr.getXXServiceResource();

                if (MapUtils.isNotEmpty(tagDefs)) {
                    logger.info("==> Port {} Tag Definitions for service(name={})", tagDefs.size(), dbService.getName());

                    for (Map.Entry<Long, RangerTagDef> entry : tagDefs.entrySet()) {
                        RangerTagDef tagDef = entry.getValue();
                        XXTagDef xTagDef = tagDefDao.getById(tagDef.getId());

                        // Only port rows whose JSON column is still empty.
                        if (xTagDef != null && StringUtils.isEmpty(xTagDef.getTagAttrDefs())) {
                            TagsUpdaterThread updaterThread = new TagsUpdaterThread(txTemplate, null, null, tagDef);
                            String errorMsg = runThread(updaterThread);

                            if (StringUtils.isNotEmpty(errorMsg)) {
                                throw new Exception(errorMsg);
                            }
                        }
                    }
                }

                if (MapUtils.isNotEmpty(tags)) {
                    logger.info("==> Port {} Tags for service(name={})", tags.size(), dbService.getName());

                    for (Map.Entry<Long, RangerTag> entry : tags.entrySet()) {
                        RangerTag tag = entry.getValue();
                        XXTag xTag = tagDao.getById(tag.getId());

                        // Only port rows whose JSON column is still empty.
                        if (xTag != null && StringUtils.isEmpty(xTag.getTagAttrs())) {
                            TagsUpdaterThread updaterThread = new TagsUpdaterThread(txTemplate, null, tag, null);
                            String errorMsg = runThread(updaterThread);

                            if (StringUtils.isNotEmpty(errorMsg)) {
                                throw new Exception(errorMsg);
                            }
                        }
                    }
                }

                if (CollectionUtils.isNotEmpty(serviceResources)) {
                    logger.info("==> Port {} Service Resources for service(name={})", serviceResources.size(), dbService.getName());

                    for (RangerServiceResource serviceResource : serviceResources) {
                        XXServiceResource xServiceResource = serviceResourceDao.getById(serviceResource.getId());

                        // Only port rows whose JSON column is still empty.
                        if (xServiceResource != null && StringUtils.isEmpty(xServiceResource.getServiceResourceElements())) {
                            TagsUpdaterThread updaterThread = new TagsUpdaterThread(txTemplate, serviceResource, null, null);
                            String errorMsg = runThread(updaterThread);

                            if (StringUtils.isNotEmpty(errorMsg)) {
                                throw new Exception(errorMsg);
                            }
                        }
                    }
                }
            }
        }

        logger.info("<== updateRangerTagsTablesWithTagsJson() ");
    }
    /**
     * Runs the given updater thread to completion and returns its error
     * message (null on success). A fresh daemon thread is used so the
     * update executes in its own transaction context.
     */
    private String runThread(TagsUpdaterThread updaterThread) throws Exception {
        updaterThread.setDaemon(true);
        updaterThread.start();
        updaterThread.join();

        return updaterThread.getErrorMsg();
    }

    // Re-saves a tag-definition, which populates its JSON column.
    private void portTagDef(RangerTagDef tagDef) {
        tagDefService.update(tagDef);
    }

    // Re-saves a tag, which populates its JSON column.
    private void portTag(RangerTag tag) {
        tagService.update(tag);
    }

    // Re-saves a service-resource and refreshes its cached view.
    private void portServiceResource(RangerServiceResource serviceResource) throws Exception {
        serviceResourceService.update(serviceResource);

        tagStore.refreshServiceResource(serviceResource.getId());
    }
    /**
     * Worker thread that ports exactly one tag object — whichever of the
     * three constructor arguments is non-null — inside its own
     * transaction, recording any failure as an error message for the
     * caller to inspect via getErrorMsg().
     */
    private class TagsUpdaterThread extends Thread {
        final TransactionTemplate txTemplate;
        final RangerServiceResource serviceResource; // nullable
        final RangerTag tag;                         // nullable
        final RangerTagDef tagDef;                   // nullable
        String errorMsg; // null on success, exception text on failure

        TagsUpdaterThread(TransactionTemplate txTemplate, final RangerServiceResource serviceResource, final RangerTag tag, final RangerTagDef tagDef) {
            this.txTemplate = txTemplate;
            this.serviceResource = serviceResource;
            this.tag = tag;
            this.tagDef = tagDef;
            this.errorMsg = null;
        }

        public String getErrorMsg() {
            return errorMsg;
        }

        @Override
        public void run() {
            // Execute the port inside a transaction; a null result from
            // the callback means success.
            errorMsg = txTemplate.execute(status -> {
                String ret = null;

                try {
                    if (serviceResource != null) {
                        portServiceResource(serviceResource);
                    }

                    if (tag != null) {
                        portTag(tag);
                    }

                    if (tagDef != null) {
                        portTagDef(tagDef);
                    }
                } catch (Throwable e) {
                    logger.error("Port failed :[serviceResource={}, tag={}, tagDef={}]", serviceResource, tag, tagDef, e);

                    ret = e.toString();
                }

                return ret;
            });
        }
    }
private static class RangerTagDBRetriever {
        Logger logger = LoggerFactory.getLogger(RangerTagDBRetriever.class);

        private final RangerDaoManager daoMgr;
        private final XXService xService;
        // Caches user-id -> display-name and resource-def-id -> name lookups.
        private final RangerTagDBRetriever.LookupCache lookupCache;

        // Populated by initializeTagCache(); may remain null if loading fails.
        private List<RangerServiceResource> serviceResources;
        private Map<Long, RangerTagDef> tagDefs;
        private Map<Long, RangerTag> tags;

        /**
         * Loads all tag objects of the given service. When a transaction
         * manager is supplied, the load runs on a separate daemon thread
         * inside a new read-only transaction; otherwise it runs inline,
         * reusing whatever transaction the caller has.
         *
         * @throws InterruptedException if waiting for the loader thread
         *         is interrupted.
         */
        RangerTagDBRetriever(final RangerDaoManager daoMgr, final PlatformTransactionManager txManager, final XXService xService) throws InterruptedException {
            this.daoMgr = daoMgr;
            this.xService = xService;
            this.lookupCache = new RangerTagDBRetriever.LookupCache();

            TransactionTemplate txTemplate;

            if (txManager != null) {
                txTemplate = new TransactionTemplate(txManager);

                txTemplate.setReadOnly(true);
            } else {
                txTemplate = null;
            }

            if (this.daoMgr != null && this.xService != null) {
                if (txTemplate == null) {
                    logger.debug("Load Tags in the same thread and using an existing transaction");

                    if (!initializeTagCache(xService)) {
                        logger.error("Failed to get tags for service:[{}}] in the same thread and using an existing transaction", xService.getName());
                    }
                } else {
                    logger.debug("Load Tags in a separate thread and using a new transaction");

                    RangerTagDBRetriever.TagLoaderThread t = new RangerTagDBRetriever.TagLoaderThread(txTemplate, xService);

                    t.setDaemon(true);
                    t.start();
                    // NOTE(review): a failure inside the loader thread is
                    // only logged there; this constructor does not surface it.
                    t.join();
                }
            }
        }
        /** @return service-resources loaded for the service; may be null if loading did not complete. */
        List<RangerServiceResource> getServiceResources() {
            return serviceResources;
        }

        /** @return tag-definitions keyed by id; may be null if loading did not complete. */
        Map<Long, RangerTagDef> getTagDefs() {
            return tagDefs;
        }

        /** @return tags keyed by id; may be null if loading did not complete. */
        Map<Long, RangerTag> getTags() {
            return tags;
        }
        /**
         * Populates serviceResources, tagDefs and tags for the given
         * service using the retriever contexts below.
         * NOTE(review): always returns true as written — failures would
         * surface as runtime exceptions rather than a false return.
         */
        private boolean initializeTagCache(XXService xService) {
            RangerTagDBRetriever.TagRetrieverServiceResourceContext serviceResourceContext = new RangerTagDBRetriever.TagRetrieverServiceResourceContext(xService);
            RangerTagDBRetriever.TagRetrieverTagDefContext tagDefContext = new RangerTagDBRetriever.TagRetrieverTagDefContext(xService);
            RangerTagDBRetriever.TagRetrieverTagContext tagContext = new RangerTagDBRetriever.TagRetrieverTagContext(xService);

            serviceResources = serviceResourceContext.getAllServiceResources();
            tagDefs = tagDefContext.getAllTagDefs();
            tags = tagContext.getAllTags();

            return true;
        }
private <T> List<T> asList(T obj) {
List<T> ret = new ArrayList<>();
if (obj != null) {
ret.add(obj);
}
return ret;
}
        /**
         * Per-retriever cache that avoids repeated DB lookups when
         * resolving portal-user ids to display names and resource-def
         * ids to resource names.
         */
        private class LookupCache {
            final Map<Long, String> userScreenNames = new HashMap<>();
            final Map<Long, String> resourceDefs = new HashMap<>();

            /**
             * Resolves a portal-user id to a display name: the public
             * screen name if set, else "first [last]" name, else the
             * login id. Results are cached; returns null for a null or
             * unknown id.
             */
            String getUserScreenName(Long userId) {
                String ret = null;

                if (userId != null) {
                    ret = userScreenNames.get(userId);

                    if (ret == null) {
                        XXPortalUser user = daoMgr.getXXPortalUser().getById(userId);

                        if (user != null) {
                            ret = user.getPublicScreenName();

                            if (StringUtil.isEmpty(ret)) {
                                ret = user.getFirstName();

                                if (StringUtil.isEmpty(ret)) {
                                    ret = user.getLoginId();
                                } else {
                                    // Append the last name when a first
                                    // name is present.
                                    if (!StringUtil.isEmpty(user.getLastName())) {
                                        ret += (" " + user.getLastName());
                                    }
                                }
                            }

                            if (ret != null) {
                                userScreenNames.put(userId, ret);
                            }
                        }
                    }
                }

                return ret;
            }

            /**
             * Resolves a resource-def id to its name, caching the result.
             * Returns null for a null or unknown id.
             */
            String getResourceName(Long resourceDefId) {
                String ret = null;

                if (resourceDefId != null) {
                    ret = resourceDefs.get(resourceDefId);

                    if (ret == null) {
                        XXResourceDef xResourceDef = daoMgr.getXXResourceDef().getById(resourceDefId);

                        if (xResourceDef != null) {
                            ret = xResourceDef.getName();

                            resourceDefs.put(resourceDefId, ret);
                        }
                    }
                }

                return ret;
            }
        }
        /**
         * Daemon thread that runs initializeTagCache() inside a new
         * read-only transaction, so the (potentially large) tag load does
         * not join the caller's transaction.
         */
        private class TagLoaderThread extends Thread {
            final TransactionTemplate txTemplate;
            final XXService xService;

            TagLoaderThread(TransactionTemplate txTemplate, final XXService xService) {
                this.txTemplate = txTemplate;
                this.xService = xService;
            }

            @Override
            public void run() {
                // Defensive: the template was already marked read-only by
                // the constructor that created it; re-asserting is harmless.
                txTemplate.setReadOnly(true);

                Boolean result = txTemplate.execute(status -> {
                    boolean ret = initializeTagCache(xService);

                    if (!ret) {
                        // Roll back so a partial load is not committed.
                        status.setRollbackOnly();

                        logger.error("Failed to get tags for service:[{}] in a new transaction", xService.getName());
                    }

                    return ret;
                });

                logger.debug("transaction result:[{}]", result);
            }
        }
        /**
         * Streams service-resources (with their elements and values) for
         * one service. The three per-table lists appear to be id-ordered
         * so they can be merge-joined in a single pass (the code backs
         * iterators up via previous() when it over-reads); if the
         * iterators fall out of step — e.g. due to concurrent updates —
         * a per-resource fallback query is used instead.
         */
        private class TagRetrieverServiceResourceContext {
            final XXService service;
            final ListIterator<XXServiceResource> iterServiceResource;
            final ListIterator<XXServiceResourceElement> iterServiceResourceElement;
            final ListIterator<XXServiceResourceElementValue> iterServiceResourceElementValue;

            /** Context over all tagged resources of a service. */
            TagRetrieverServiceResourceContext(XXService xService) {
                Long serviceId = xService == null ? null : xService.getId();
                List<XXServiceResource> xServiceResources = daoMgr.getXXServiceResource().findByServiceId(serviceId);
                List<XXServiceResourceElement> xServiceResourceElements = daoMgr.getXXServiceResourceElement().findTaggedResourcesInServiceId(serviceId);
                List<XXServiceResourceElementValue> xServiceResourceElementValues = daoMgr.getXXServiceResourceElementValue().findTaggedResourcesInServiceId(serviceId);

                this.service = xService;
                this.iterServiceResource = xServiceResources.listIterator();
                this.iterServiceResourceElement = xServiceResourceElements.listIterator();
                this.iterServiceResourceElementValue = xServiceResourceElementValues.listIterator();
            }

            /** Context over a single resource (used by the fallback path). */
            TagRetrieverServiceResourceContext(XXServiceResource xServiceResource, XXService xService) {
                Long resourceId = xServiceResource == null ? null : xServiceResource.getId();
                List<XXServiceResource> xServiceResources = asList(xServiceResource);
                List<XXServiceResourceElement> xServiceResourceElements = daoMgr.getXXServiceResourceElement().findByResourceId(resourceId);
                List<XXServiceResourceElementValue> xServiceResourceElementValues = daoMgr.getXXServiceResourceElementValue().findByResourceId(resourceId);

                this.service = xService;
                this.iterServiceResource = xServiceResources.listIterator();
                this.iterServiceResourceElement = xServiceResourceElements.listIterator();
                this.iterServiceResourceElementValue = xServiceResourceElementValues.listIterator();
            }

            /**
             * Builds all service-resources; falls back to per-resource
             * retrieval if the merge-join did not consume every row.
             */
            List<RangerServiceResource> getAllServiceResources() {
                List<RangerServiceResource> ret = new ArrayList<>();

                while (iterServiceResource.hasNext()) {
                    RangerServiceResource serviceResource = getNextServiceResource();

                    if (serviceResource != null) {
                        ret.add(serviceResource);
                    }
                }

                if (!hasProcessedAll()) {
                    logger.warn("getAllServiceResources(): perhaps one or more serviceResources got updated during retrieval. Using fallback ... ");

                    ret = getServiceResourcesBySecondary();
                }

                return ret;
            }

            /** Materializes the next service-resource, or null when exhausted. */
            RangerServiceResource getNextServiceResource() {
                RangerServiceResource ret = null;

                if (iterServiceResource.hasNext()) {
                    XXServiceResource xServiceResource = iterServiceResource.next();

                    if (xServiceResource != null) {
                        ret = new RangerServiceResource();

                        ret.setId(xServiceResource.getId());
                        ret.setGuid(xServiceResource.getGuid());
                        ret.setIsEnabled(xServiceResource.getIsEnabled());
                        ret.setCreatedBy(lookupCache.getUserScreenName(xServiceResource.getAddedByUserId()));
                        ret.setUpdatedBy(lookupCache.getUserScreenName(xServiceResource.getUpdatedByUserId()));
                        ret.setCreateTime(xServiceResource.getCreateTime());
                        ret.setUpdateTime(xServiceResource.getUpdateTime());
                        ret.setVersion(xServiceResource.getVersion());
                        ret.setResourceSignature(xServiceResource.getResourceSignature());
                        ret.setServiceName(xService.getName());

                        getServiceResourceElements(ret);
                    }
                }

                return ret;
            }

            /**
             * Consumes the element (and element-value) rows belonging to
             * the given resource, backing the shared iterators up one
             * step whenever a row for a later resource is read.
             */
            void getServiceResourceElements(RangerServiceResource serviceResource) {
                while (iterServiceResourceElement.hasNext()) {
                    XXServiceResourceElement xServiceResourceElement = iterServiceResourceElement.next();

                    if (xServiceResourceElement.getResourceId().equals(serviceResource.getId())) {
                        RangerPolicy.RangerPolicyResource resource = new RangerPolicy.RangerPolicyResource();

                        resource.setIsExcludes(xServiceResourceElement.getIsExcludes());
                        resource.setIsRecursive(xServiceResourceElement.getIsRecursive());

                        while (iterServiceResourceElementValue.hasNext()) {
                            XXServiceResourceElementValue xServiceResourceElementValue = iterServiceResourceElementValue.next();

                            if (xServiceResourceElementValue.getResElementId().equals(xServiceResourceElement.getId())) {
                                resource.addValue(xServiceResourceElementValue.getValue());
                            } else {
                                // Read one row too far: push it back for
                                // the next element.
                                if (iterServiceResourceElementValue.hasPrevious()) {
                                    iterServiceResourceElementValue.previous();
                                }

                                break;
                            }
                        }

                        serviceResource.getResourceElements().put(lookupCache.getResourceName(xServiceResourceElement.getResDefId()), resource);
                    } else if (xServiceResourceElement.getResourceId().compareTo(serviceResource.getId()) > 0) {
                        // A larger resource-id means we are past this
                        // resource's rows — push back and stop.
                        if (iterServiceResourceElement.hasPrevious()) {
                            iterServiceResourceElement.previous();
                        }

                        break;
                    }
                }
            }

            /** @return true when every row of all three lists was consumed. */
            boolean hasProcessedAll() {
                boolean moreToProcess = iterServiceResource.hasNext() || iterServiceResourceElement.hasNext() || iterServiceResourceElementValue.hasNext();

                return !moreToProcess;
            }

            /**
             * Fallback: re-queries each tagged resource individually and
             * materializes it through a fresh single-resource context.
             */
            List<RangerServiceResource> getServiceResourcesBySecondary() {
                List<RangerServiceResource> ret = null;

                if (service != null) {
                    List<XXServiceResource> xServiceResources = daoMgr.getXXServiceResource().findTaggedResourcesInServiceId(service.getId());

                    if (CollectionUtils.isNotEmpty(xServiceResources)) {
                        ret = new ArrayList<>(xServiceResources.size());

                        for (XXServiceResource xServiceResource : xServiceResources) {
                            RangerTagDBRetriever.TagRetrieverServiceResourceContext ctx = new RangerTagDBRetriever.TagRetrieverServiceResourceContext(xServiceResource, service);
                            RangerServiceResource serviceResource = ctx.getNextServiceResource();

                            if (serviceResource != null) {
                                ret.add(serviceResource);
                            }
                        }
                    }
                }

                return ret;
            }
        }
        /**
         * Streams tag-definitions (with their attribute-defs) for one
         * service, using the same single-pass merge-join-with-backtrack
         * scheme as the service-resource context, with a per-tag-def
         * fallback when the iterators fall out of step.
         */
        private class TagRetrieverTagDefContext {
            final XXService service;
            final ListIterator<XXTagDef> iterTagDef;
            final ListIterator<XXTagAttributeDef> iterTagAttributeDef;

            /** Context over all tag-defs of a service. */
            TagRetrieverTagDefContext(XXService xService) {
                Long serviceId = xService == null ? null : xService.getId();
                List<XXTagDef> xTagDefs = daoMgr.getXXTagDef().findByServiceId(serviceId);
                List<XXTagAttributeDef> xTagAttributeDefs = daoMgr.getXXTagAttributeDef().findByServiceId(serviceId);

                this.service = xService;
                this.iterTagDef = xTagDefs.listIterator();
                this.iterTagAttributeDef = xTagAttributeDefs.listIterator();
            }

            /** Context over a single tag-def (used by the fallback path). */
            TagRetrieverTagDefContext(XXTagDef xTagDef, XXService xService) {
                Long tagDefId = xTagDef == null ? null : xTagDef.getId();
                List<XXTagDef> xTagDefs = asList(xTagDef);
                List<XXTagAttributeDef> xTagAttributeDefs = daoMgr.getXXTagAttributeDef().findByTagDefId(tagDefId);

                this.service = xService;
                this.iterTagDef = xTagDefs.listIterator();
                this.iterTagAttributeDef = xTagAttributeDefs.listIterator();
            }

            /**
             * Builds all tag-defs keyed by id; falls back to per-tag-def
             * retrieval if the merge-join did not consume every row.
             */
            Map<Long, RangerTagDef> getAllTagDefs() {
                Map<Long, RangerTagDef> ret = new HashMap<>();

                while (iterTagDef.hasNext()) {
                    RangerTagDef tagDef = getNextTagDef();

                    if (tagDef != null) {
                        ret.put(tagDef.getId(), tagDef);
                    }
                }

                if (!hasProcessedAllTagDefs()) {
                    logger.warn("getAllTagDefs(): perhaps one or more tag-definitions got updated during retrieval. Using fallback ... ");

                    ret = getTagDefsBySecondary();
                }

                return ret;
            }

            /** Materializes the next tag-def, or null when exhausted. */
            RangerTagDef getNextTagDef() {
                RangerTagDef ret = null;

                if (iterTagDef.hasNext()) {
                    XXTagDef xTagDef = iterTagDef.next();

                    if (xTagDef != null) {
                        ret = new RangerTagDef();

                        ret.setId(xTagDef.getId());
                        ret.setGuid(xTagDef.getGuid());
                        ret.setIsEnabled(xTagDef.getIsEnabled());
                        ret.setCreatedBy(lookupCache.getUserScreenName(xTagDef.getAddedByUserId()));
                        ret.setUpdatedBy(lookupCache.getUserScreenName(xTagDef.getUpdatedByUserId()));
                        ret.setCreateTime(xTagDef.getCreateTime());
                        ret.setUpdateTime(xTagDef.getUpdateTime());
                        ret.setVersion(xTagDef.getVersion());
                        ret.setName(xTagDef.getName());
                        ret.setSource(xTagDef.getSource());

                        getTagAttributeDefs(ret);
                    }
                }

                return ret;
            }

            /**
             * Consumes the attribute-def rows belonging to the given
             * tag-def, backing the shared iterator up one step when a row
             * for a later tag-def is read.
             */
            void getTagAttributeDefs(RangerTagDef tagDef) {
                while (iterTagAttributeDef.hasNext()) {
                    XXTagAttributeDef xTagAttributeDef = iterTagAttributeDef.next();

                    if (xTagAttributeDef.getTagDefId().equals(tagDef.getId())) {
                        RangerTagDef.RangerTagAttributeDef tagAttributeDef = new RangerTagDef.RangerTagAttributeDef();

                        tagAttributeDef.setName(xTagAttributeDef.getName());
                        tagAttributeDef.setType(xTagAttributeDef.getType());

                        tagDef.getAttributeDefs().add(tagAttributeDef);
                    } else if (xTagAttributeDef.getTagDefId().compareTo(tagDef.getId()) > 0) {
                        // Past this tag-def's rows — push back and stop.
                        if (iterTagAttributeDef.hasPrevious()) {
                            iterTagAttributeDef.previous();
                        }

                        break;
                    }
                }
            }

            /**
             * @return true when every attribute-def row was consumed
             *         (the tag-def iterator itself is exhausted by the
             *         getAllTagDefs() loop).
             */
            boolean hasProcessedAllTagDefs() {
                boolean moreToProcess = iterTagAttributeDef.hasNext();

                return !moreToProcess;
            }

            /**
             * Fallback: re-queries each tag-def individually and
             * materializes it through a fresh single-tag-def context.
             */
            Map<Long, RangerTagDef> getTagDefsBySecondary() {
                Map<Long, RangerTagDef> ret = null;

                if (service != null) {
                    List<XXTagDef> xTagDefs = daoMgr.getXXTagDef().findByServiceId(service.getId());

                    if (CollectionUtils.isNotEmpty(xTagDefs)) {
                        ret = new HashMap<>(xTagDefs.size());

                        for (XXTagDef xTagDef : xTagDefs) {
                            TagRetrieverTagDefContext ctx = new TagRetrieverTagDefContext(xTagDef, service);
                            RangerTagDef tagDef = ctx.getNextTagDef();

                            if (tagDef != null) {
                                ret.put(tagDef.getId(), tagDef);
                            }
                        }
                    }
                }

                return ret;
            }
        }
/**
 * Streams {@link RangerTag} objects for a service out of two database
 * cursors: one over the tag rows and one over the tag-attribute rows. Tags
 * and attributes are matched by walking both cursors in tandem; if the
 * cursors fall out of step (e.g. data changed mid-retrieval) a slower
 * per-tag fallback ({@code getTagsBySecondary}) is used instead.
 *
 * <p>NOTE(review): the tandem walk assumes both result lists are ordered by
 * tag id — TODO confirm the DAO queries order their results accordingly.
 */
private class TagRetrieverTagContext {
    final XXService service;                              // service whose tags are being retrieved (may be null for the per-tag constructor)
    final ListIterator<XXTag> iterTag;                    // cursor over the tag rows
    final ListIterator<XXTagAttribute> iterTagAttribute;  // cursor over the tag-attribute rows, consumed in step with iterTag

    /** Primary constructor: loads all tags and tag-attributes for the service. */
    TagRetrieverTagContext(XXService xService) {
        Long serviceId = xService == null ? null : xService.getId();
        List<XXTag> xTags = daoMgr.getXXTag().findByServiceId(serviceId);
        List<XXTagAttribute> xTagAttributes = daoMgr.getXXTagAttribute().findByServiceId(serviceId);
        this.service = xService;
        this.iterTag = xTags.listIterator();
        this.iterTagAttribute = xTagAttributes.listIterator();
    }

    /** Fallback constructor: scopes the context to a single tag and only its attributes. */
    TagRetrieverTagContext(XXTag xTag, XXService xService) {
        Long tagId = xTag == null ? null : xTag.getId();
        List<XXTag> xTags = asList(xTag);
        List<XXTagAttribute> xTagAttributes = daoMgr.getXXTagAttribute().findByTagId(tagId);
        this.service = xService;
        this.iterTag = xTags.listIterator();
        this.iterTagAttribute = xTagAttributes.listIterator();
    }

    /**
     * Drains the cursors and returns all tags keyed by id. If the attribute
     * cursor was not fully consumed (cursors out of step), falls back to the
     * per-tag secondary retrieval and returns that result instead.
     */
    Map<Long, RangerTag> getAllTags() {
        Map<Long, RangerTag> ret = new HashMap<>();
        while (iterTag.hasNext()) {
            RangerTag tag = getNextTag();
            if (tag != null) {
                ret.put(tag.getId(), tag);
            }
        }
        if (!hasProcessedAllTags()) {
            logger.warn("getAllTags(): perhaps one or more tags got updated during retrieval. Using fallback ... ");
            ret = getTagsBySecondary();
        }
        return ret;
    }

    /**
     * Builds a {@link RangerTag} from the next row of the tag cursor,
     * resolving its validity schedule from the serialized options, its type
     * name from the tag-def map, and its attributes from the attribute
     * cursor. Returns {@code null} when the cursor is exhausted or the row
     * is {@code null}.
     */
    RangerTag getNextTag() {
        RangerTag ret = null;
        if (iterTag.hasNext()) {
            XXTag xTag = iterTag.next();
            if (xTag != null) {
                ret = new RangerTag();
                ret.setId(xTag.getId());
                ret.setGuid(xTag.getGuid());
                ret.setOwner(xTag.getOwner());
                // user ids are resolved to display names via the lookup cache
                ret.setCreatedBy(lookupCache.getUserScreenName(xTag.getAddedByUserId()));
                ret.setUpdatedBy(lookupCache.getUserScreenName(xTag.getUpdatedByUserId()));
                ret.setCreateTime(xTag.getCreateTime());
                ret.setUpdateTime(xTag.getUpdateTime());
                ret.setVersion(xTag.getVersion());
                // options are stored as a JSON string->string map; the validity
                // periods, if present, are themselves a JSON-encoded schedule list
                Map<String, String> mapOfOptions = JsonUtils.jsonToMapStringString(xTag.getOptions());
                if (MapUtils.isNotEmpty(mapOfOptions)) {
                    String validityPeriodsStr = mapOfOptions.get(RangerTag.OPTION_TAG_VALIDITY_PERIODS);
                    if (StringUtils.isNotEmpty(validityPeriodsStr)) {
                        List<RangerValiditySchedule> validityPeriods = JsonUtils.jsonToRangerValiditySchedule(validityPeriodsStr);
                        ret.setValidityPeriods(validityPeriods);
                    }
                }
                // xTag.getType() holds the tag-def id; translate it to the
                // tag-def's name (left unset if the tag-def is not found).
                // NOTE(review): getTagDefs() is defined on the enclosing
                // retriever — presumably a cached id->tag-def map; verify.
                Map<Long, RangerTagDef> tagDefs = getTagDefs();
                if (tagDefs != null) {
                    RangerTagDef tagDef = tagDefs.get(xTag.getType());
                    if (tagDef != null) {
                        ret.setType(tagDef.getName());
                    }
                }
                getTagAttributes(ret);
            }
        }
        return ret;
    }

    /**
     * Consumes attribute rows for {@code tag} from the shared attribute
     * cursor and puts them into the tag's attribute map (created lazily).
     * The cursor is left positioned at the first row of the next tag.
     *
     * @param tag the tag (with id already set) to populate; mutated in place
     */
    void getTagAttributes(RangerTag tag) {
        while (iterTagAttribute.hasNext()) {
            XXTagAttribute xTagAttribute = iterTagAttribute.next();
            if (xTagAttribute.getTagId().equals(tag.getId())) {
                String attributeName = xTagAttribute.getName();
                String attributeValue = xTagAttribute.getValue();
                if (tag.getAttributes() == null) {
                    tag.setAttributes(new HashMap<>());
                }
                tag.getAttributes().put(attributeName, attributeValue);
            } else if (xTagAttribute.getTagId().compareTo(tag.getId()) > 0) {
                // Overshot into the next tag's attributes: step the cursor
                // back one row so the next caller sees it, then stop.
                if (iterTagAttribute.hasPrevious()) {
                    iterTagAttribute.previous();
                }
                break;
            }
        }
    }

    /** @return {@code true} once the attribute cursor has been fully consumed. */
    boolean hasProcessedAllTags() {
        boolean moreToProcess = iterTagAttribute.hasNext();
        return !moreToProcess;
    }

    /**
     * Fallback retrieval: re-reads each tag of the service individually,
     * fetching its attributes with a dedicated per-tag context.
     *
     * @return tags keyed by id, or {@code null} when there is no service or
     *         the service has no tags
     */
    Map<Long, RangerTag> getTagsBySecondary() {
        Map<Long, RangerTag> ret = null;
        if (service != null) {
            List<XXTag> xTags = daoMgr.getXXTag().findByServiceId(service.getId());
            if (CollectionUtils.isNotEmpty(xTags)) {
                ret = new HashMap<>(xTags.size());
                for (XXTag xTag : xTags) {
                    TagRetrieverTagContext ctx = new TagRetrieverTagContext(xTag, service);
                    RangerTag tag = ctx.getNextTag();
                    if (tag != null) {
                        ret.put(tag.getId(), tag);
                    }
                }
            }
        }
        return ret;
    }
}
}
}
|
google/java-photoslibrary | 35,906 | photoslibraryapi/src/main/java/com/google/photos/library/v1/proto/ListAlbumsResponse.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/photos/library/v1/photos_library.proto
package com.google.photos.library.v1.proto;
/**
*
*
* <pre>
* List of albums requested.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.ListAlbumsResponse}
*/
public final class ListAlbumsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.photos.library.v1.ListAlbumsResponse)
ListAlbumsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAlbumsResponse.newBuilder() to construct.
private ListAlbumsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListAlbumsResponse() {
albums_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAlbumsResponse();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_ListAlbumsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_ListAlbumsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.photos.library.v1.proto.ListAlbumsResponse.class,
com.google.photos.library.v1.proto.ListAlbumsResponse.Builder.class);
}
public static final int ALBUMS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.photos.types.proto.Album> albums_;
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.photos.types.proto.Album> getAlbumsList() {
return albums_;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.photos.types.proto.AlbumOrBuilder>
getAlbumsOrBuilderList() {
return albums_;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
@java.lang.Override
public int getAlbumsCount() {
return albums_.size();
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
@java.lang.Override
public com.google.photos.types.proto.Album getAlbums(int index) {
return albums_.get(index);
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
@java.lang.Override
public com.google.photos.types.proto.AlbumOrBuilder getAlbumsOrBuilder(int index) {
return albums_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Output only. Token to use to get the next set of albums. Populated if
* there are more albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Token to use to get the next set of albums. Populated if
* there are more albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < albums_.size(); i++) {
output.writeMessage(1, albums_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < albums_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, albums_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.photos.library.v1.proto.ListAlbumsResponse)) {
return super.equals(obj);
}
com.google.photos.library.v1.proto.ListAlbumsResponse other =
(com.google.photos.library.v1.proto.ListAlbumsResponse) obj;
if (!getAlbumsList().equals(other.getAlbumsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAlbumsCount() > 0) {
hash = (37 * hash) + ALBUMS_FIELD_NUMBER;
hash = (53 * hash) + getAlbumsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.photos.library.v1.proto.ListAlbumsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* List of albums requested.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.ListAlbumsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.photos.library.v1.ListAlbumsResponse)
com.google.photos.library.v1.proto.ListAlbumsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_ListAlbumsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_ListAlbumsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.photos.library.v1.proto.ListAlbumsResponse.class,
com.google.photos.library.v1.proto.ListAlbumsResponse.Builder.class);
}
// Construct using com.google.photos.library.v1.proto.ListAlbumsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (albumsBuilder_ == null) {
albums_ = java.util.Collections.emptyList();
} else {
albums_ = null;
albumsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_ListAlbumsResponse_descriptor;
}
@java.lang.Override
public com.google.photos.library.v1.proto.ListAlbumsResponse getDefaultInstanceForType() {
return com.google.photos.library.v1.proto.ListAlbumsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.photos.library.v1.proto.ListAlbumsResponse build() {
com.google.photos.library.v1.proto.ListAlbumsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.photos.library.v1.proto.ListAlbumsResponse buildPartial() {
com.google.photos.library.v1.proto.ListAlbumsResponse result =
new com.google.photos.library.v1.proto.ListAlbumsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.photos.library.v1.proto.ListAlbumsResponse result) {
if (albumsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
albums_ = java.util.Collections.unmodifiableList(albums_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.albums_ = albums_;
} else {
result.albums_ = albumsBuilder_.build();
}
}
private void buildPartial0(com.google.photos.library.v1.proto.ListAlbumsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.photos.library.v1.proto.ListAlbumsResponse) {
return mergeFrom((com.google.photos.library.v1.proto.ListAlbumsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.photos.library.v1.proto.ListAlbumsResponse other) {
if (other == com.google.photos.library.v1.proto.ListAlbumsResponse.getDefaultInstance())
return this;
if (albumsBuilder_ == null) {
if (!other.albums_.isEmpty()) {
if (albums_.isEmpty()) {
albums_ = other.albums_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAlbumsIsMutable();
albums_.addAll(other.albums_);
}
onChanged();
}
} else {
if (!other.albums_.isEmpty()) {
if (albumsBuilder_.isEmpty()) {
albumsBuilder_.dispose();
albumsBuilder_ = null;
albums_ = other.albums_;
bitField0_ = (bitField0_ & ~0x00000001);
albumsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAlbumsFieldBuilder()
: null;
} else {
albumsBuilder_.addAllMessages(other.albums_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.photos.types.proto.Album m =
input.readMessage(
com.google.photos.types.proto.Album.parser(), extensionRegistry);
if (albumsBuilder_ == null) {
ensureAlbumsIsMutable();
albums_.add(m);
} else {
albumsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.photos.types.proto.Album> albums_ =
java.util.Collections.emptyList();
private void ensureAlbumsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
albums_ = new java.util.ArrayList<com.google.photos.types.proto.Album>(albums_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.types.proto.Album,
com.google.photos.types.proto.Album.Builder,
com.google.photos.types.proto.AlbumOrBuilder>
albumsBuilder_;
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public java.util.List<com.google.photos.types.proto.Album> getAlbumsList() {
if (albumsBuilder_ == null) {
return java.util.Collections.unmodifiableList(albums_);
} else {
return albumsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public int getAlbumsCount() {
if (albumsBuilder_ == null) {
return albums_.size();
} else {
return albumsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public com.google.photos.types.proto.Album getAlbums(int index) {
if (albumsBuilder_ == null) {
return albums_.get(index);
} else {
return albumsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder setAlbums(int index, com.google.photos.types.proto.Album value) {
if (albumsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAlbumsIsMutable();
albums_.set(index, value);
onChanged();
} else {
albumsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder setAlbums(
int index, com.google.photos.types.proto.Album.Builder builderForValue) {
if (albumsBuilder_ == null) {
ensureAlbumsIsMutable();
albums_.set(index, builderForValue.build());
onChanged();
} else {
albumsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder addAlbums(com.google.photos.types.proto.Album value) {
if (albumsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAlbumsIsMutable();
albums_.add(value);
onChanged();
} else {
albumsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder addAlbums(int index, com.google.photos.types.proto.Album value) {
if (albumsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAlbumsIsMutable();
albums_.add(index, value);
onChanged();
} else {
albumsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder addAlbums(com.google.photos.types.proto.Album.Builder builderForValue) {
if (albumsBuilder_ == null) {
ensureAlbumsIsMutable();
albums_.add(builderForValue.build());
onChanged();
} else {
albumsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder addAlbums(
int index, com.google.photos.types.proto.Album.Builder builderForValue) {
if (albumsBuilder_ == null) {
ensureAlbumsIsMutable();
albums_.add(index, builderForValue.build());
onChanged();
} else {
albumsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder addAllAlbums(
java.lang.Iterable<? extends com.google.photos.types.proto.Album> values) {
if (albumsBuilder_ == null) {
ensureAlbumsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, albums_);
onChanged();
} else {
albumsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder clearAlbums() {
if (albumsBuilder_ == null) {
albums_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
albumsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public Builder removeAlbums(int index) {
if (albumsBuilder_ == null) {
ensureAlbumsIsMutable();
albums_.remove(index);
onChanged();
} else {
albumsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public com.google.photos.types.proto.Album.Builder getAlbumsBuilder(int index) {
return getAlbumsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public com.google.photos.types.proto.AlbumOrBuilder getAlbumsOrBuilder(int index) {
if (albumsBuilder_ == null) {
return albums_.get(index);
} else {
return albumsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public java.util.List<? extends com.google.photos.types.proto.AlbumOrBuilder>
getAlbumsOrBuilderList() {
if (albumsBuilder_ != null) {
return albumsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(albums_);
}
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public com.google.photos.types.proto.Album.Builder addAlbumsBuilder() {
return getAlbumsFieldBuilder()
.addBuilder(com.google.photos.types.proto.Album.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public com.google.photos.types.proto.Album.Builder addAlbumsBuilder(int index) {
return getAlbumsFieldBuilder()
.addBuilder(index, com.google.photos.types.proto.Album.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. List of albums shown in the Albums tab of the user's Google
* Photos app.
* </pre>
*
* <code>repeated .google.photos.types.Album albums = 1;</code>
*/
public java.util.List<com.google.photos.types.proto.Album.Builder> getAlbumsBuilderList() {
return getAlbumsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.types.proto.Album,
com.google.photos.types.proto.Album.Builder,
com.google.photos.types.proto.AlbumOrBuilder>
getAlbumsFieldBuilder() {
if (albumsBuilder_ == null) {
albumsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.types.proto.Album,
com.google.photos.types.proto.Album.Builder,
com.google.photos.types.proto.AlbumOrBuilder>(
albums_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
albums_ = null;
}
return albumsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Output only. Token to use to get the next set of albums. Populated if
* there are more albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. Token to use to get the next set of albums. Populated if
* there are more albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. Token to use to get the next set of albums. Populated if
* there are more albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Token to use to get the next set of albums. Populated if
* there are more albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Token to use to get the next set of albums. Populated if
* there are more albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.photos.library.v1.ListAlbumsResponse)
}
// @@protoc_insertion_point(class_scope:google.photos.library.v1.ListAlbumsResponse)
private static final com.google.photos.library.v1.proto.ListAlbumsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.photos.library.v1.proto.ListAlbumsResponse();
}
public static com.google.photos.library.v1.proto.ListAlbumsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAlbumsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAlbumsResponse>() {
@java.lang.Override
public ListAlbumsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAlbumsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAlbumsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.photos.library.v1.proto.ListAlbumsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,899 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchNearestEntitiesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/feature_online_store_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* The request message for
* [FeatureOnlineStoreService.SearchNearestEntities][google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService.SearchNearestEntities].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest}
*/
public final class SearchNearestEntitiesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest)
SearchNearestEntitiesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchNearestEntitiesRequest.newBuilder() to construct.
private SearchNearestEntitiesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchNearestEntitiesRequest() {
featureView_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchNearestEntitiesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.FeatureOnlineStoreServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.FeatureOnlineStoreServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.class,
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.Builder.class);
}
private int bitField0_;
public static final int FEATURE_VIEW_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object featureView_ = "";
/**
*
*
* <pre>
* Required. FeatureView resource format
* `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}`
* </pre>
*
* <code>
* string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The featureView.
*/
@java.lang.Override
public java.lang.String getFeatureView() {
java.lang.Object ref = featureView_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
featureView_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. FeatureView resource format
* `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}`
* </pre>
*
* <code>
* string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for featureView.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFeatureViewBytes() {
java.lang.Object ref = featureView_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
featureView_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int QUERY_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery query_;
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the query field is set.
*/
@java.lang.Override
public boolean hasQuery() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The query.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery getQuery() {
return query_ == null
? com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.getDefaultInstance()
: query_;
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.NearestNeighborQueryOrBuilder getQueryOrBuilder() {
return query_ == null
? com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.getDefaultInstance()
: query_;
}
public static final int RETURN_FULL_ENTITY_FIELD_NUMBER = 3;
private boolean returnFullEntity_ = false;
/**
*
*
* <pre>
* Optional. If set to true, the full entities (including all vector values
* and metadata) of the nearest neighbors are returned; otherwise only entity
* id of the nearest neighbors will be returned. Note that returning full
* entities will significantly increase the latency and cost of the query.
* </pre>
*
* <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The returnFullEntity.
*/
@java.lang.Override
public boolean getReturnFullEntity() {
return returnFullEntity_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(featureView_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, featureView_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getQuery());
}
if (returnFullEntity_ != false) {
output.writeBool(3, returnFullEntity_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(featureView_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, featureView_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getQuery());
}
if (returnFullEntity_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, returnFullEntity_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest other =
(com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest) obj;
if (!getFeatureView().equals(other.getFeatureView())) return false;
if (hasQuery() != other.hasQuery()) return false;
if (hasQuery()) {
if (!getQuery().equals(other.getQuery())) return false;
}
if (getReturnFullEntity() != other.getReturnFullEntity()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + FEATURE_VIEW_FIELD_NUMBER;
hash = (53 * hash) + getFeatureView().hashCode();
if (hasQuery()) {
hash = (37 * hash) + QUERY_FIELD_NUMBER;
hash = (53 * hash) + getQuery().hashCode();
}
hash = (37 * hash) + RETURN_FULL_ENTITY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getReturnFullEntity());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [FeatureOnlineStoreService.SearchNearestEntities][google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService.SearchNearestEntities].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest)
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.FeatureOnlineStoreServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.FeatureOnlineStoreServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.class,
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getQueryFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
featureView_ = "";
query_ = null;
if (queryBuilder_ != null) {
queryBuilder_.dispose();
queryBuilder_ = null;
}
returnFullEntity_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.FeatureOnlineStoreServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest build() {
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest buildPartial() {
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest result =
new com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.featureView_ = featureView_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.query_ = queryBuilder_ == null ? query_ : queryBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.returnFullEntity_ = returnFullEntity_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest other) {
if (other
== com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.getDefaultInstance())
return this;
if (!other.getFeatureView().isEmpty()) {
featureView_ = other.featureView_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasQuery()) {
mergeQuery(other.getQuery());
}
if (other.getReturnFullEntity() != false) {
setReturnFullEntity(other.getReturnFullEntity());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
featureView_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getQueryFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
returnFullEntity_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object featureView_ = "";
/**
*
*
* <pre>
* Required. FeatureView resource format
* `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}`
* </pre>
*
* <code>
* string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The featureView.
*/
public java.lang.String getFeatureView() {
java.lang.Object ref = featureView_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
featureView_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. FeatureView resource format
* `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}`
* </pre>
*
* <code>
* string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for featureView.
*/
public com.google.protobuf.ByteString getFeatureViewBytes() {
java.lang.Object ref = featureView_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
featureView_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. FeatureView resource format
* `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}`
* </pre>
*
* <code>
* string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The featureView to set.
* @return This builder for chaining.
*/
public Builder setFeatureView(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
featureView_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. FeatureView resource format
* `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}`
* </pre>
*
* <code>
* string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearFeatureView() {
featureView_ = getDefaultInstance().getFeatureView();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. FeatureView resource format
* `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}`
* </pre>
*
* <code>
* string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for featureView to set.
* @return This builder for chaining.
*/
public Builder setFeatureViewBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
featureView_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery query_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery,
com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.Builder,
com.google.cloud.aiplatform.v1beta1.NearestNeighborQueryOrBuilder>
queryBuilder_;
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the query field is set.
*/
public boolean hasQuery() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The query.
*/
public com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery getQuery() {
if (queryBuilder_ == null) {
return query_ == null
? com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.getDefaultInstance()
: query_;
} else {
return queryBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setQuery(com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery value) {
if (queryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
query_ = value;
} else {
queryBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setQuery(
com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.Builder builderForValue) {
if (queryBuilder_ == null) {
query_ = builderForValue.build();
} else {
queryBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeQuery(com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery value) {
if (queryBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& query_ != null
&& query_
!= com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.getDefaultInstance()) {
getQueryBuilder().mergeFrom(value);
} else {
query_ = value;
}
} else {
queryBuilder_.mergeFrom(value);
}
if (query_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearQuery() {
bitField0_ = (bitField0_ & ~0x00000002);
query_ = null;
if (queryBuilder_ != null) {
queryBuilder_.dispose();
queryBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.Builder getQueryBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getQueryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.NearestNeighborQueryOrBuilder getQueryOrBuilder() {
if (queryBuilder_ != null) {
return queryBuilder_.getMessageOrBuilder();
} else {
return query_ == null
? com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.getDefaultInstance()
: query_;
}
}
/**
*
*
* <pre>
* Required. The query.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery,
com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.Builder,
com.google.cloud.aiplatform.v1beta1.NearestNeighborQueryOrBuilder>
getQueryFieldBuilder() {
if (queryBuilder_ == null) {
queryBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery,
com.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.Builder,
com.google.cloud.aiplatform.v1beta1.NearestNeighborQueryOrBuilder>(
getQuery(), getParentForChildren(), isClean());
query_ = null;
}
return queryBuilder_;
}
private boolean returnFullEntity_;
/**
*
*
* <pre>
* Optional. If set to true, the full entities (including all vector values
* and metadata) of the nearest neighbors are returned; otherwise only entity
* id of the nearest neighbors will be returned. Note that returning full
* entities will significantly increase the latency and cost of the query.
* </pre>
*
* <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The returnFullEntity.
*/
@java.lang.Override
public boolean getReturnFullEntity() {
return returnFullEntity_;
}
/**
*
*
* <pre>
* Optional. If set to true, the full entities (including all vector values
* and metadata) of the nearest neighbors are returned; otherwise only entity
* id of the nearest neighbors will be returned. Note that returning full
* entities will significantly increase the latency and cost of the query.
* </pre>
*
* <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The returnFullEntity to set.
* @return This builder for chaining.
*/
public Builder setReturnFullEntity(boolean value) {
returnFullEntity_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If set to true, the full entities (including all vector values
* and metadata) of the nearest neighbors are returned; otherwise only entity
* id of the nearest neighbors will be returned. Note that returning full
* entities will significantly increase the latency and cost of the query.
* </pre>
*
* <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearReturnFullEntity() {
bitField0_ = (bitField0_ & ~0x00000004);
returnFullEntity_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest)
private static final com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest();
}
public static com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SearchNearestEntitiesRequest> PARSER =
new com.google.protobuf.AbstractParser<SearchNearestEntitiesRequest>() {
@java.lang.Override
public SearchNearestEntitiesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SearchNearestEntitiesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SearchNearestEntitiesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== File boundary (extraction artifact). The generated message class above
// ends here; the content below belongs to a different source file:
// googleapis/google-cloud-java —
// java-translate/proto-google-cloud-translate-v3/src/main/java/com/google/cloud/translate/v3/ListExamplesRequest.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/translate/v3/automl_translation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.translate.v3;
/**
*
*
* <pre>
* Request message for ListExamples.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListExamplesRequest}
*/
public final class ListExamplesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.translation.v3.ListExamplesRequest)
ListExamplesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListExamplesRequest.newBuilder() to construct.
private ListExamplesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListExamplesRequest() {
parent_ = "";
filter_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListExamplesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListExamplesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListExamplesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.translate.v3.ListExamplesRequest.class,
com.google.cloud.translate.v3.ListExamplesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of the parent dataset. In form of
* `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the parent dataset. In form of
* `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. An expression for filtering the examples that will be returned.
* Example filter:
* * `usage=TRAIN`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. An expression for filtering the examples that will be returned.
* Example filter:
* * `usage=TRAIN`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Requested page size. The server can return fewer results than
* requested.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained from next_page_token field in the response of a
* ListExamples call.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained from next_page_token field in the response of a
* ListExamples call.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  // memoizedIsInitialized caches the result: -1 = not yet computed,
  // 1 = initialized, 0 = not initialized. This proto3 message declares no
  // required fields, so once computed the answer is always "initialized".
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message to the wire in ascending field-number order.
  // Proto3 semantics: fields holding their default value (empty string, 0)
  // contribute no bytes and are skipped entirely.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    // Round-trip any fields parsed from a newer schema revision.
    getUnknownFields().writeTo(output);
  }
  // Computes the exact number of bytes writeTo() would emit. The result is
  // memoized in memoizedSize (-1 means "not yet computed"); messages are
  // immutable so the cached value never goes stale.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo(): default-valued proto3 fields contribute no bytes.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: all four declared fields plus the unknown-field set
  // must match, so two messages differing only in unrecognized wire data are
  // not considered equal.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-matching types fall back to Object.equals (reference equality).
    if (!(obj instanceof com.google.cloud.translate.v3.ListExamplesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.translate.v3.ListExamplesRequest other =
        (com.google.cloud.translate.v3.ListExamplesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same state equals() compares, memoized in memoizedHashCode.
  // 0 is the "not yet computed" sentinel, so a legitimately zero-valued hash
  // would be recomputed on every call (harmless, just slightly slower).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Each field mixes in its field number (x37) then its value hash (x53).
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListExamplesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.translate.v3.ListExamplesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ListExamples.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListExamplesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.translation.v3.ListExamplesRequest)
com.google.cloud.translate.v3.ListExamplesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListExamplesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListExamplesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.translate.v3.ListExamplesRequest.class,
com.google.cloud.translate.v3.ListExamplesRequest.Builder.class);
}
// Construct using com.google.cloud.translate.v3.ListExamplesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    // Resets every field to its proto3 default and forgets all presence bits,
    // returning the builder to a freshly-created state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0; // forget which fields were explicitly set
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListExamplesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.translate.v3.ListExamplesRequest getDefaultInstanceForType() {
return com.google.cloud.translate.v3.ListExamplesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.translate.v3.ListExamplesRequest build() {
com.google.cloud.translate.v3.ListExamplesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds without the isInitialized() check that build() performs; for this
    // message the two behave identically (isInitialized() is always true).
    @java.lang.Override
    public com.google.cloud.translate.v3.ListExamplesRequest buildPartial() {
      com.google.cloud.translate.v3.ListExamplesRequest result =
          new com.google.cloud.translate.v3.ListExamplesRequest(this);
      if (bitField0_ != 0) { // only copy fields if at least one was set
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set into the freshly built
    // message. Bit layout: 0x1 = parent, 0x2 = filter, 0x4 = page_size,
    // 0x8 = page_token.
    private void buildPartial0(com.google.cloud.translate.v3.ListExamplesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) { // parent (field 1)
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) { // filter (field 2)
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) { // page_size (field 3)
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) { // page_token (field 4)
        result.pageToken_ = pageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    // Fast path for the concrete generated type; any other Message falls back
    // to the generic reflection-based merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.translate.v3.ListExamplesRequest) {
        return mergeFrom((com.google.cloud.translate.v3.ListExamplesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: only fields carrying a non-default value in
    // 'other' overwrite this builder's current values; default-valued fields
    // in 'other' leave existing state untouched.
    public Builder mergeFrom(com.google.cloud.translate.v3.ListExamplesRequest other) {
      if (other == com.google.cloud.translate.v3.ListExamplesRequest.getDefaultInstance())
        return this; // merging the default instance is a no-op
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Streaming wire-format parse. Each tag is (field_number << 3) | wire_type,
    // so: 10 = field 1 length-delimited, 18 = field 2 length-delimited,
    // 24 = field 3 varint, 34 = field 4 length-delimited. Tag 0 means
    // end-of-stream. Unrecognized tags are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // end of stream
              done = true;
              break;
            case 10: // parent (field 1)
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18: // filter (field 2)
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24: // page_size (field 3)
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34: // page_token (field 4)
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always fire change notification, even on a parse failure, since some
        // fields may already have been written into the builder.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of the parent dataset. In form of
* `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the parent dataset. In form of
* `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the parent dataset. In form of
* `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the parent dataset. In form of
* `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the parent dataset. In form of
* `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. An expression for filtering the examples that will be returned.
* Example filter:
* * `usage=TRAIN`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An expression for filtering the examples that will be returned.
* Example filter:
* * `usage=TRAIN`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An expression for filtering the examples that will be returned.
* Example filter:
* * `usage=TRAIN`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An expression for filtering the examples that will be returned.
* Example filter:
* * `usage=TRAIN`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An expression for filtering the examples that will be returned.
* Example filter:
* * `usage=TRAIN`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Requested page size. The server can return fewer results than
* requested.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Requested page size. The server can return fewer results than
* requested.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Requested page size. The server can return fewer results than
* requested.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained from next_page_token field in the response of a
* ListExamples call.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained from next_page_token field in the response of a
* ListExamples call.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained from next_page_token field in the response of a
* ListExamples call.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained from next_page_token field in the response of a
* ListExamples call.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained from next_page_token field in the response of a
* ListExamples call.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.translation.v3.ListExamplesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListExamplesRequest)
private static final com.google.cloud.translate.v3.ListExamplesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.translate.v3.ListExamplesRequest();
}
public static com.google.cloud.translate.v3.ListExamplesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Shared parser instance. Every failure path attaches the partially-parsed
  // message via setUnfinishedMessage() so callers can inspect what was decoded
  // before the error occurred.
  private static final com.google.protobuf.Parser<ListExamplesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListExamplesRequest>() {
        @java.lang.Override
        public ListExamplesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<ListExamplesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListExamplesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.translate.v3.ListExamplesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== File boundary (extraction artifact). ListExamplesRequest.java ends
// here; the content below belongs to a different source file: apache/hop —
// plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutput/JsonOutputDialog.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.pipeline.transforms.jsonoutput;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import org.apache.hop.core.Const;
import org.apache.hop.core.Props;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.TransformMeta;
import org.apache.hop.ui.core.ConstUi;
import org.apache.hop.ui.core.PropsUi;
import org.apache.hop.ui.core.dialog.BaseDialog;
import org.apache.hop.ui.core.dialog.EnterSelectionDialog;
import org.apache.hop.ui.core.dialog.ErrorDialog;
import org.apache.hop.ui.core.dialog.MessageBox;
import org.apache.hop.ui.core.gui.GuiResource;
import org.apache.hop.ui.core.widget.ColumnInfo;
import org.apache.hop.ui.core.widget.ComboVar;
import org.apache.hop.ui.core.widget.TableView;
import org.apache.hop.ui.core.widget.TextVar;
import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
public class JsonOutputDialog extends BaseTransformDialog {
  // i18n anchor class: message keys are resolved relative to this class's package.
  private static final Class<?> PKG = JsonOutputMeta.class;

  // Encoding selector (populated lazily on first focus, see setEncodings()).
  private Label wlEncoding;
  private ComboVar wEncoding;

  // Name of the field receiving the generated JSON when outputting to a value.
  private Label wlOutputValue;
  private TextVar wOutputValue;

  // JSON "bloc" name and number of rows grouped per bloc.
  private TextVar wBlocName;
  private TextVar wNrRowsInBloc;

  // Grid mapping input field names to JSON element names.
  private TableView wFields;

  // The transform metadata edited by this dialog.
  private final JsonOutputMeta input;

  // Guards against repopulating the encoding combo more than once.
  private boolean gotEncodings = false;

  // Column definitions of the fields grid; colinf[0] gets the input field names.
  private ColumnInfo[] colinf;

  // File-output widgets (enabled/disabled in updateOperation()).
  private Label wlAddToResult;
  private Button wAddToResult;
  private Label wlFilename;
  private Button wbFilename;
  private TextVar wFilename;
  private Label wlExtension;
  private TextVar wExtension;
  private Label wlCreateParentFolder;
  private Button wCreateParentFolder;
  private Label wlDoNotOpenNewFileInit;
  private Button wDoNotOpenNewFileInit;
  private Label wlAddDate;
  private Button wAddDate;
  private Label wlAddTime;
  private Button wAddTime;
  private Button wbShowFiles;
  private Label wlAppend;
  private Button wAppend;

  // Operation selector: output to value, write to file, or both.
  private CCombo wOperation;

  // Names of the fields coming from the previous transform (filled in the background).
  private final List<String> inputFields = new ArrayList<>();
  /**
   * Creates the JSON Output transform dialog.
   *
   * @param parent the parent shell
   * @param variables the variable space used to resolve variable expressions
   * @param transformMeta the JSON Output metadata edited by this dialog
   * @param pipelineMeta the pipeline the transform belongs to
   */
  public JsonOutputDialog(
      Shell parent, IVariables variables, JsonOutputMeta transformMeta, PipelineMeta pipelineMeta) {
    super(parent, variables, transformMeta, pipelineMeta);
    input = transformMeta;
  }
  /**
   * Builds all widgets, opens the dialog, and blocks until it is closed.
   *
   * @return the (possibly changed) transform name, or {@code null} when the user cancelled
   */
  @Override
  public String open() {
    Shell parent = getParent();

    shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
    PropsUi.setLook(shell);
    setShellImage(shell, input);

    // Any widget modification marks the metadata as changed.
    ModifyListener lsMod = e -> input.setChanged();
    changed = input.hasChanged();

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = PropsUi.getFormMargin();
    formLayout.marginHeight = PropsUi.getFormMargin();

    shell.setLayout(formLayout);
    shell.setText(BaseMessages.getString(PKG, "JsonOutputDialog.DialogTitle"));

    int middle = props.getMiddlePct();
    int margin = PropsUi.getMargin();

    // TransformName line
    wlTransformName = new Label(shell, SWT.RIGHT);
    wlTransformName.setText(BaseMessages.getString(PKG, "System.TransformName.Label"));
    wlTransformName.setToolTipText(BaseMessages.getString(PKG, "System.TransformName.Tooltip"));
    PropsUi.setLook(wlTransformName);
    fdlTransformName = new FormData();
    fdlTransformName.left = new FormAttachment(0, 0);
    fdlTransformName.top = new FormAttachment(0, margin);
    fdlTransformName.right = new FormAttachment(middle, -margin);
    wlTransformName.setLayoutData(fdlTransformName);
    wTransformName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    wTransformName.setText(transformName);
    PropsUi.setLook(wTransformName);
    wTransformName.addModifyListener(lsMod);
    fdTransformName = new FormData();
    fdTransformName.left = new FormAttachment(middle, 0);
    fdTransformName.top = new FormAttachment(0, margin);
    fdTransformName.right = new FormAttachment(100, 0);
    wTransformName.setLayoutData(fdTransformName);

    // Buttons at the bottom
    //
    wOk = new Button(shell, SWT.PUSH);
    wOk.setText(BaseMessages.getString(PKG, "System.Button.OK"));
    wOk.addListener(SWT.Selection, e -> ok());
    wCancel = new Button(shell, SWT.PUSH);
    wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
    wCancel.addListener(SWT.Selection, e -> cancel());
    setButtonPositions(new Button[] {wOk, wCancel}, margin, null);

    CTabFolder wTabFolder = new CTabFolder(shell, SWT.BORDER);
    PropsUi.setLook(wTabFolder, Props.WIDGET_STYLE_TAB);

    // ////////////////////////
    // START OF General TAB///
    // /
    CTabItem wGeneralTab = new CTabItem(wTabFolder, SWT.NONE);
    wGeneralTab.setFont(GuiResource.getInstance().getFontDefault());
    wGeneralTab.setText(BaseMessages.getString(PKG, "JsonOutputDialog.GeneralTab.TabTitle"));

    FormLayout generalLayout = new FormLayout();
    generalLayout.marginWidth = 3;
    generalLayout.marginHeight = 3;

    Composite wGeneralComp = new Composite(wTabFolder, SWT.NONE);
    PropsUi.setLook(wGeneralComp);
    wGeneralComp.setLayout(generalLayout);

    // Operation
    Label wlOperation = new Label(wGeneralComp, SWT.RIGHT);
    wlOperation.setText(BaseMessages.getString(PKG, "JsonOutputDialog.Operation.Label"));
    PropsUi.setLook(wlOperation);
    FormData fdlOperation = new FormData();
    fdlOperation.left = new FormAttachment(0, 0);
    fdlOperation.right = new FormAttachment(middle, -margin);
    // NOTE(review): wNrRowsInBloc is not created until further below, so this
    // attachment is made with a null control — verify the intended layout anchor.
    fdlOperation.top = new FormAttachment(wNrRowsInBloc, margin);
    wlOperation.setLayoutData(fdlOperation);

    wOperation = new CCombo(wGeneralComp, SWT.BORDER | SWT.READ_ONLY);
    PropsUi.setLook(wOperation);
    wOperation.addModifyListener(lsMod);
    FormData fdOperation = new FormData();
    fdOperation.left = new FormAttachment(middle, 0);
    fdOperation.top = new FormAttachment(wNrRowsInBloc, margin);
    fdOperation.right = new FormAttachment(100, -margin);
    wOperation.setLayoutData(fdOperation);
    // Operation choices come from the meta's description map.
    String[] operationTypeDescArray =
        JsonOutputMeta.operationDescType
            .keySet()
            .toArray(new String[JsonOutputMeta.operationDescType.size()]);
    wOperation.setItems(operationTypeDescArray);
    wOperation.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            updateOperation();
          }
        });

    // Connection grouping?
    // ////////////////////////
    // START OF Settings GROUP
    //
    Group wSettings = new Group(wGeneralComp, SWT.SHADOW_NONE);
    PropsUi.setLook(wSettings);
    wSettings.setText(BaseMessages.getString(PKG, "JsonOutputDialog.Group.Settings.Label"));

    FormLayout groupFileLayout = new FormLayout();
    groupFileLayout.marginWidth = 10;
    groupFileLayout.marginHeight = 10;
    wSettings.setLayout(groupFileLayout);

    Label wlBlocName = new Label(wSettings, SWT.RIGHT);
    wlBlocName.setText(BaseMessages.getString(PKG, "JsonOutputDialog.BlocName.Label"));
    PropsUi.setLook(wlBlocName);
    FormData fdlBlocName = new FormData();
    fdlBlocName.left = new FormAttachment(0, 0);
    fdlBlocName.top = new FormAttachment(wOperation, margin);
    fdlBlocName.right = new FormAttachment(middle, -margin);
    wlBlocName.setLayoutData(fdlBlocName);
    wBlocName = new TextVar(variables, wSettings, SWT.BORDER | SWT.READ_ONLY);
    wBlocName.setEditable(true);
    PropsUi.setLook(wBlocName);
    wBlocName.addModifyListener(lsMod);
    FormData fdBlocName = new FormData();
    fdBlocName.left = new FormAttachment(middle, 0);
    fdBlocName.top = new FormAttachment(wOperation, margin);
    fdBlocName.right = new FormAttachment(100, 0);
    wBlocName.setLayoutData(fdBlocName);

    Label wlNrRowsInBloc = new Label(wSettings, SWT.RIGHT);
    wlNrRowsInBloc.setText(BaseMessages.getString(PKG, "JsonOutputDialog.NrRowsInBloc.Label"));
    PropsUi.setLook(wlNrRowsInBloc);
    FormData fdlNrRowsInBloc = new FormData();
    fdlNrRowsInBloc.left = new FormAttachment(0, 0);
    fdlNrRowsInBloc.top = new FormAttachment(wBlocName, margin);
    fdlNrRowsInBloc.right = new FormAttachment(middle, -margin);
    wlNrRowsInBloc.setLayoutData(fdlNrRowsInBloc);
    wNrRowsInBloc = new TextVar(variables, wSettings, SWT.BORDER | SWT.READ_ONLY);
    wNrRowsInBloc.setToolTipText(
        BaseMessages.getString(PKG, "JsonOutputDialog.NrRowsInBloc.ToolTip"));
    wNrRowsInBloc.setEditable(true);
    PropsUi.setLook(wNrRowsInBloc);
    wNrRowsInBloc.addModifyListener(lsMod);
    FormData fdNrRowsInBloc = new FormData();
    fdNrRowsInBloc.left = new FormAttachment(middle, 0);
    fdNrRowsInBloc.top = new FormAttachment(wBlocName, margin);
    fdNrRowsInBloc.right = new FormAttachment(100, 0);
    wNrRowsInBloc.setLayoutData(fdNrRowsInBloc);

    wlOutputValue = new Label(wSettings, SWT.RIGHT);
    wlOutputValue.setText(BaseMessages.getString(PKG, "JsonOutputDialog.OutputValue.Label"));
    PropsUi.setLook(wlOutputValue);
    FormData fdlOutputValue = new FormData();
    fdlOutputValue.left = new FormAttachment(0, 0);
    fdlOutputValue.top = new FormAttachment(wNrRowsInBloc, margin);
    fdlOutputValue.right = new FormAttachment(middle, -margin);
    wlOutputValue.setLayoutData(fdlOutputValue);
    wOutputValue = new TextVar(variables, wSettings, SWT.BORDER | SWT.READ_ONLY);
    wOutputValue.setEditable(true);
    PropsUi.setLook(wOutputValue);
    wOutputValue.addModifyListener(lsMod);
    FormData fdOutputValue = new FormData();
    fdOutputValue.left = new FormAttachment(middle, 0);
    fdOutputValue.top = new FormAttachment(wNrRowsInBloc, margin);
    fdOutputValue.right = new FormAttachment(100, 0);
    wOutputValue.setLayoutData(fdOutputValue);

    FormData fdSettings = new FormData();
    fdSettings.left = new FormAttachment(0, margin);
    fdSettings.top = new FormAttachment(wOperation, 2 * margin);
    fdSettings.right = new FormAttachment(100, -margin);
    wSettings.setLayoutData(fdSettings);

    // ///////////////////////////////////////////////////////////
    // / END OF Settings GROUP
    // ///////////////////////////////////////////////////////////

    // Connection grouping?
    // ////////////////////////
    // START OF FileName GROUP
    //
    Group wFileName = new Group(wGeneralComp, SWT.SHADOW_NONE);
    PropsUi.setLook(wFileName);
    wFileName.setText(BaseMessages.getString(PKG, "JsonOutputDialog.Group.File.Label"));

    FormLayout groupfilenameayout = new FormLayout();
    groupfilenameayout.marginWidth = 10;
    groupfilenameayout.marginHeight = 10;
    wFileName.setLayout(groupfilenameayout);

    // Filename line
    wlFilename = new Label(wFileName, SWT.RIGHT);
    wlFilename.setText(BaseMessages.getString(PKG, "JsonOutputDialog.Filename.Label"));
    PropsUi.setLook(wlFilename);
    FormData fdlFilename = new FormData();
    fdlFilename.left = new FormAttachment(0, 0);
    fdlFilename.top = new FormAttachment(wSettings, margin);
    fdlFilename.right = new FormAttachment(middle, -margin);
    wlFilename.setLayoutData(fdlFilename);

    wbFilename = new Button(wFileName, SWT.PUSH | SWT.CENTER);
    PropsUi.setLook(wbFilename);
    wbFilename.setText(BaseMessages.getString(PKG, "System.Button.Browse"));
    FormData fdbFilename = new FormData();
    fdbFilename.right = new FormAttachment(100, 0);
    fdbFilename.top = new FormAttachment(wSettings, 0);
    wbFilename.setLayoutData(fdbFilename);
    // Browse button opens a save-style file dialog filtered to JSON extensions.
    wbFilename.addListener(
        SWT.Selection,
        e ->
            BaseDialog.presentFileDialog(
                true,
                shell,
                wFilename,
                variables,
                new String[] {"*.js", "*.json", "*"},
                new String[] {
                  BaseMessages.getString(PKG, "System.FileType.JsonFiles"),
                  BaseMessages.getString(PKG, "System.FileType.AllFiles")
                },
                true));

    wFilename = new TextVar(variables, wFileName, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wFilename);
    wFilename.addModifyListener(lsMod);
    FormData fdFilename = new FormData();
    fdFilename.left = new FormAttachment(middle, 0);
    fdFilename.top = new FormAttachment(wOutputValue, margin);
    fdFilename.right = new FormAttachment(wbFilename, -margin);
    wFilename.setLayoutData(fdFilename);

    // Append to end of file?
    wlAppend = new Label(wFileName, SWT.RIGHT);
    wlAppend.setText(BaseMessages.getString(PKG, "JsonOutputDialog.Append.Label"));
    PropsUi.setLook(wlAppend);
    FormData fdlAppend = new FormData();
    fdlAppend.left = new FormAttachment(0, 0);
    fdlAppend.top = new FormAttachment(wFilename, margin);
    fdlAppend.right = new FormAttachment(middle, -margin);
    wlAppend.setLayoutData(fdlAppend);
    wAppend = new Button(wFileName, SWT.CHECK);
    wAppend.setToolTipText(BaseMessages.getString(PKG, "JsonOutputDialog.Append.Tooltip"));
    PropsUi.setLook(wAppend);
    FormData fdAppend = new FormData();
    fdAppend.left = new FormAttachment(middle, 0);
    fdAppend.top = new FormAttachment(wlAppend, 0, SWT.CENTER);
    fdAppend.right = new FormAttachment(100, 0);
    wAppend.setLayoutData(fdAppend);
    wAppend.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            input.setChanged();
          }
        });

    // Create Parent Folder
    wlCreateParentFolder = new Label(wFileName, SWT.RIGHT);
    wlCreateParentFolder.setText(
        BaseMessages.getString(PKG, "JsonOutputDialog.CreateParentFolder.Label"));
    PropsUi.setLook(wlCreateParentFolder);
    FormData fdlCreateParentFolder = new FormData();
    fdlCreateParentFolder.left = new FormAttachment(0, 0);
    fdlCreateParentFolder.top = new FormAttachment(wAppend, margin);
    fdlCreateParentFolder.right = new FormAttachment(middle, -margin);
    wlCreateParentFolder.setLayoutData(fdlCreateParentFolder);
    wCreateParentFolder = new Button(wFileName, SWT.CHECK);
    wCreateParentFolder.setToolTipText(
        BaseMessages.getString(PKG, "JsonOutputDialog.CreateParentFolder.Tooltip"));
    PropsUi.setLook(wCreateParentFolder);
    FormData fdCreateParentFolder = new FormData();
    fdCreateParentFolder.left = new FormAttachment(middle, 0);
    fdCreateParentFolder.top = new FormAttachment(wlCreateParentFolder, 0, SWT.CENTER);
    fdCreateParentFolder.right = new FormAttachment(100, 0);
    wCreateParentFolder.setLayoutData(fdCreateParentFolder);
    wCreateParentFolder.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            input.setChanged();
          }
        });

    // Open new File at Init
    wlDoNotOpenNewFileInit = new Label(wFileName, SWT.RIGHT);
    wlDoNotOpenNewFileInit.setText(
        BaseMessages.getString(PKG, "JsonOutputDialog.DoNotOpenNewFileInit.Label"));
    PropsUi.setLook(wlDoNotOpenNewFileInit);
    FormData fdlDoNotOpenNewFileInit = new FormData();
    fdlDoNotOpenNewFileInit.left = new FormAttachment(0, 0);
    fdlDoNotOpenNewFileInit.top = new FormAttachment(wCreateParentFolder, margin);
    fdlDoNotOpenNewFileInit.right = new FormAttachment(middle, -margin);
    wlDoNotOpenNewFileInit.setLayoutData(fdlDoNotOpenNewFileInit);
    wDoNotOpenNewFileInit = new Button(wFileName, SWT.CHECK);
    wDoNotOpenNewFileInit.setToolTipText(
        BaseMessages.getString(PKG, "JsonOutputDialog.DoNotOpenNewFileInit.Tooltip"));
    PropsUi.setLook(wDoNotOpenNewFileInit);
    FormData fdDoNotOpenNewFileInit = new FormData();
    fdDoNotOpenNewFileInit.left = new FormAttachment(middle, 0);
    fdDoNotOpenNewFileInit.top = new FormAttachment(wlDoNotOpenNewFileInit, 0, SWT.CENTER);
    fdDoNotOpenNewFileInit.right = new FormAttachment(100, 0);
    wDoNotOpenNewFileInit.setLayoutData(fdDoNotOpenNewFileInit);
    wDoNotOpenNewFileInit.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            input.setChanged();
          }
        });

    // Extension line
    wlExtension = new Label(wFileName, SWT.RIGHT);
    wlExtension.setText(BaseMessages.getString(PKG, "System.Label.Extension"));
    PropsUi.setLook(wlExtension);
    FormData fdlExtension = new FormData();
    fdlExtension.left = new FormAttachment(0, 0);
    fdlExtension.top = new FormAttachment(wDoNotOpenNewFileInit, margin);
    fdlExtension.right = new FormAttachment(middle, -margin);
    wlExtension.setLayoutData(fdlExtension);
    wExtension = new TextVar(variables, wFileName, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wExtension);
    wExtension.addModifyListener(lsMod);
    FormData fdExtension = new FormData();
    fdExtension.left = new FormAttachment(middle, 0);
    fdExtension.top = new FormAttachment(wDoNotOpenNewFileInit, margin);
    fdExtension.right = new FormAttachment(100, -margin);
    wExtension.setLayoutData(fdExtension);

    wlEncoding = new Label(wFileName, SWT.RIGHT);
    wlEncoding.setText(BaseMessages.getString(PKG, "JsonOutputDialog.Encoding.Label"));
    PropsUi.setLook(wlEncoding);
    FormData fdlEncoding = new FormData();
    fdlEncoding.left = new FormAttachment(0, 0);
    fdlEncoding.top = new FormAttachment(wExtension, margin);
    fdlEncoding.right = new FormAttachment(middle, -margin);
    wlEncoding.setLayoutData(fdlEncoding);
    wEncoding = new ComboVar(variables, wFileName, SWT.BORDER | SWT.READ_ONLY);
    wEncoding.setEditable(true);
    PropsUi.setLook(wEncoding);
    wEncoding.addModifyListener(lsMod);
    FormData fdEncoding = new FormData();
    fdEncoding.left = new FormAttachment(middle, 0);
    fdEncoding.top = new FormAttachment(wExtension, margin);
    fdEncoding.right = new FormAttachment(100, 0);
    wEncoding.setLayoutData(fdEncoding);
    // Fill the (potentially slow) charset list lazily, showing a busy cursor meanwhile.
    wEncoding.addFocusListener(
        new FocusListener() {
          @Override
          public void focusLost(FocusEvent e) {
            // Do nothing
          }

          @Override
          public void focusGained(FocusEvent e) {
            Cursor busy = new Cursor(shell.getDisplay(), SWT.CURSOR_WAIT);
            shell.setCursor(busy);
            setEncodings();
            shell.setCursor(null);
            busy.dispose();
          }
        });

    // Create multi-part file?
    wlAddDate = new Label(wFileName, SWT.RIGHT);
    wlAddDate.setText(BaseMessages.getString(PKG, "JsonOutputDialog.AddDate.Label"));
    PropsUi.setLook(wlAddDate);
    FormData fdlAddDate = new FormData();
    fdlAddDate.left = new FormAttachment(0, 0);
    fdlAddDate.top = new FormAttachment(wEncoding, margin);
    fdlAddDate.right = new FormAttachment(middle, -margin);
    wlAddDate.setLayoutData(fdlAddDate);
    wAddDate = new Button(wFileName, SWT.CHECK);
    PropsUi.setLook(wAddDate);
    FormData fdAddDate = new FormData();
    fdAddDate.left = new FormAttachment(middle, 0);
    fdAddDate.top = new FormAttachment(wlAddDate, 0, SWT.CENTER);
    fdAddDate.right = new FormAttachment(100, 0);
    wAddDate.setLayoutData(fdAddDate);
    wAddDate.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            input.setChanged();
          }
        });
    // Create multi-part file?
    wlAddTime = new Label(wFileName, SWT.RIGHT);
    wlAddTime.setText(BaseMessages.getString(PKG, "JsonOutputDialog.AddTime.Label"));
    PropsUi.setLook(wlAddTime);
    FormData fdlAddTime = new FormData();
    fdlAddTime.left = new FormAttachment(0, 0);
    fdlAddTime.top = new FormAttachment(wAddDate, margin);
    fdlAddTime.right = new FormAttachment(middle, -margin);
    wlAddTime.setLayoutData(fdlAddTime);
    wAddTime = new Button(wFileName, SWT.CHECK);
    PropsUi.setLook(wAddTime);
    FormData fdAddTime = new FormData();
    fdAddTime.left = new FormAttachment(middle, 0);
    fdAddTime.top = new FormAttachment(wlAddTime, 0, SWT.CENTER);
    fdAddTime.right = new FormAttachment(100, 0);
    wAddTime.setLayoutData(fdAddTime);
    wAddTime.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            input.setChanged();
          }
        });

    wbShowFiles = new Button(wFileName, SWT.PUSH | SWT.CENTER);
    PropsUi.setLook(wbShowFiles);
    wbShowFiles.setText(BaseMessages.getString(PKG, "JsonOutputDialog.ShowFiles.Button"));
    FormData fdbShowFiles = new FormData();
    fdbShowFiles.left = new FormAttachment(middle, 0);
    fdbShowFiles.top = new FormAttachment(wAddTime, margin * 2);
    wbShowFiles.setLayoutData(fdbShowFiles);
    // Preview the file names that would be produced with the current settings.
    wbShowFiles.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            JsonOutputMeta tfoi = new JsonOutputMeta();
            getInfo(tfoi);
            String[] files = tfoi.getFiles(variables);
            if (files != null && files.length > 0) {
              EnterSelectionDialog esd =
                  new EnterSelectionDialog(
                      shell,
                      files,
                      BaseMessages.getString(PKG, "JsonOutputDialog.SelectOutputFiles.DialogTitle"),
                      BaseMessages.getString(
                          PKG, "JsonOutputDialog.SelectOutputFiles.DialogMessage"));
              esd.setViewOnly();
              esd.open();
            } else {
              MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
              mb.setMessage(
                  BaseMessages.getString(PKG, "JsonOutputDialog.NoFilesFound.DialogMessage"));
              mb.setText(BaseMessages.getString(PKG, "System.DialogTitle.Error"));
              mb.open();
            }
          }
        });

    // Add File to the result files name
    wlAddToResult = new Label(wFileName, SWT.RIGHT);
    wlAddToResult.setText(BaseMessages.getString(PKG, "JsonOutputDialog.AddFileToResult.Label"));
    PropsUi.setLook(wlAddToResult);
    FormData fdlAddToResult = new FormData();
    fdlAddToResult.left = new FormAttachment(0, 0);
    fdlAddToResult.top = new FormAttachment(wbShowFiles, margin);
    fdlAddToResult.right = new FormAttachment(middle, -margin);
    wlAddToResult.setLayoutData(fdlAddToResult);
    wAddToResult = new Button(wFileName, SWT.CHECK);
    wAddToResult.setToolTipText(
        BaseMessages.getString(PKG, "JsonOutputDialog.AddFileToResult.Tooltip"));
    PropsUi.setLook(wAddToResult);
    FormData fdAddToResult = new FormData();
    fdAddToResult.left = new FormAttachment(middle, 0);
    fdAddToResult.top = new FormAttachment(wlAddToResult, 0, SWT.CENTER);
    fdAddToResult.right = new FormAttachment(100, 0);
    wAddToResult.setLayoutData(fdAddToResult);
    SelectionAdapter lsSelR =
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent arg0) {
            input.setChanged();
          }
        };
    wAddToResult.addSelectionListener(lsSelR);

    FormData fdFileName = new FormData();
    fdFileName.left = new FormAttachment(0, margin);
    fdFileName.top = new FormAttachment(wSettings, 2 * margin);
    fdFileName.right = new FormAttachment(100, -margin);
    wFileName.setLayoutData(fdFileName);

    // ///////////////////////////////////////////////////////////
    // / END OF FileName GROUP
    // ///////////////////////////////////////////////////////////

    FormData fdGeneralComp = new FormData();
    fdGeneralComp.left = new FormAttachment(0, 0);
    fdGeneralComp.top = new FormAttachment(wTransformName, margin);
    fdGeneralComp.right = new FormAttachment(100, 0);
    fdGeneralComp.bottom = new FormAttachment(100, 0);
    wGeneralComp.setLayoutData(fdGeneralComp);

    wGeneralComp.layout();
    wGeneralTab.setControl(wGeneralComp);

    // ///////////////////////////////////////////////////////////
    // / END OF General TAB
    // ///////////////////////////////////////////////////////////

    // Fields tab...
    //
    CTabItem wFieldsTab = new CTabItem(wTabFolder, SWT.NONE);
    wFieldsTab.setFont(GuiResource.getInstance().getFontDefault());
    wFieldsTab.setText(BaseMessages.getString(PKG, "JsonOutputDialog.FieldsTab.TabTitle"));

    FormLayout fieldsLayout = new FormLayout();
    fieldsLayout.marginWidth = PropsUi.getFormMargin();
    fieldsLayout.marginHeight = PropsUi.getFormMargin();

    Composite wFieldsComp = new Composite(wTabFolder, SWT.NONE);
    wFieldsComp.setLayout(fieldsLayout);
    PropsUi.setLook(wFieldsComp);

    wGet = new Button(wFieldsComp, SWT.PUSH);
    wGet.setText(BaseMessages.getString(PKG, "JsonOutputDialog.Get.Button"));
    wGet.setToolTipText(BaseMessages.getString(PKG, "JsonOutputDialog.Get.Tooltip"));

    setButtonPositions(new Button[] {wGet}, margin, null);

    final int FieldsRows = input.getOutputFields().size();

    colinf =
        new ColumnInfo[] {
          new ColumnInfo(
              BaseMessages.getString(PKG, "JsonOutputDialog.Fieldname.Column"),
              ColumnInfo.COLUMN_TYPE_CCOMBO,
              new String[] {""},
              false),
          new ColumnInfo(
              BaseMessages.getString(PKG, "JsonOutputDialog.ElementName.Column"),
              ColumnInfo.COLUMN_TYPE_TEXT,
              false),
        };
    colinf[1].setUsingVariables(true);
    wFields =
        new TableView(
            variables,
            wFieldsComp,
            SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI,
            colinf,
            FieldsRows,
            lsMod,
            props);

    FormData fdFields = new FormData();
    fdFields.left = new FormAttachment(0, 0);
    fdFields.top = new FormAttachment(0, 0);
    fdFields.right = new FormAttachment(100, 0);
    fdFields.bottom = new FormAttachment(wGet, -margin);
    wFields.setLayoutData(fdFields);

    //
    // Search the fields in the background
    // (off the UI thread; the combo column is filled via setComboBoxes() when done).
    final Runnable runnable =
        () -> {
          TransformMeta transformMeta = pipelineMeta.findTransform(transformName);
          if (transformMeta != null) {
            try {
              IRowMeta row = pipelineMeta.getPrevTransformFields(variables, transformMeta);

              // Remember these fields...
              for (int i = 0; i < row.size(); i++) {
                inputFields.add(row.getValueMeta(i).getName());
              }
              setComboBoxes();
            } catch (HopException e) {
              logError(BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Message"));
            }
          }
        };
    new Thread(runnable).start();

    FormData fdFieldsComp = new FormData();
    fdFieldsComp.left = new FormAttachment(0, 0);
    fdFieldsComp.top = new FormAttachment(0, 0);
    fdFieldsComp.right = new FormAttachment(100, 0);
    fdFieldsComp.bottom = new FormAttachment(100, 0);
    wFieldsComp.setLayoutData(fdFieldsComp);

    wFieldsComp.layout();
    wFieldsTab.setControl(wFieldsComp);

    FormData fdTabFolder = new FormData();
    fdTabFolder.left = new FormAttachment(0, 0);
    fdTabFolder.top = new FormAttachment(wTransformName, margin);
    fdTabFolder.right = new FormAttachment(100, 0);
    fdTabFolder.bottom = new FormAttachment(wOk, -2 * margin);
    wTabFolder.setLayoutData(fdTabFolder);

    wGet.addListener(SWT.Selection, e -> get());

    // Keep the fields grid sized to the shell on resize.
    lsResize =
        event -> {
          Point size = shell.getSize();
          wFields.setSize(size.x - 10, size.y - 50);
          wFields.table.setSize(size.x - 10, size.y - 50);
          wFields.redraw();
        };
    shell.addListener(SWT.Resize, lsResize);

    wTabFolder.setSelection(0);

    getData();
    updateOperation();
    input.setChanged(changed);

    BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel());

    return transformName;
  }
protected void setComboBoxes() {
// Something was changed in the row.
String[] fieldNames = ConstUi.sortFieldNames(inputFields);
colinf[0].setComboValues(fieldNames);
}
private void setEncodings() {
// Encoding of the text file:
if (!gotEncodings) {
gotEncodings = true;
wEncoding.removeAll();
List<Charset> values = new ArrayList<>(Charset.availableCharsets().values());
for (Charset charSet : values) {
wEncoding.add(charSet.displayName());
}
// Now select the default!
String defEncoding = Const.getEnvironmentVariable("file.encoding", "UTF-8");
int idx = Const.indexOfString(defEncoding, wEncoding.getItems());
if (idx >= 0) {
wEncoding.select(idx);
} else {
wEncoding.select(Const.indexOfString("UTF-8", wEncoding.getItems()));
}
}
}
/** Copy information from the meta-data input to the dialog fields. */
public void getData() {
wBlocName.setText(Const.NVL(input.getJsonBloc(), ""));
wNrRowsInBloc.setText(Const.NVL(input.getNrRowsInBloc(), ""));
wEncoding.setText(Const.NVL(input.getEncoding(), ""));
wOutputValue.setText(Const.NVL(input.getOutputValue(), ""));
wOperation.setText(JsonOutputMeta.operationTypeDesc.get(input.getOperationType()));
wFilename.setText(Const.NVL(input.getFileName(), ""));
wCreateParentFolder.setSelection(input.isCreateParentFolder());
wExtension.setText(Const.NVL(input.getExtension(), ""));
wAddDate.setSelection(input.isDateInFilename());
wAddTime.setSelection(input.isTimeInFilename());
wAppend.setSelection(input.isFileAppended());
wEncoding.setText(Const.NVL(input.getEncoding(), ""));
wAddToResult.setSelection(input.isAddToResult());
wDoNotOpenNewFileInit.setSelection(input.isDoNotOpenNewFileInit());
if (isDebug()) {
logDebug(BaseMessages.getString(PKG, "JsonOutputDialog.Log.GettingFieldsInfo"));
}
for (int i = 0; i < input.getOutputFields().size(); i++) {
JsonOutputField field = input.getOutputFields().get(i);
TableItem item = wFields.table.getItem(i);
item.setText(1, Const.NVL(field.getFieldName(), ""));
item.setText(2, Const.NVL(field.getElementName(), ""));
}
wFields.optWidth(true);
wTransformName.selectAll();
wTransformName.setFocus();
}
  /** Discards the dialog: restores the original changed flag and closes without saving. */
  private void cancel() {
    transformName = null; // signals to the caller that nothing was changed
    input.setChanged(backupChanged);
    dispose();
  }
private void getInfo(JsonOutputMeta jsometa) {
jsometa.setJsonBloc(wBlocName.getText());
jsometa.setNrRowsInBloc(wNrRowsInBloc.getText());
jsometa.setEncoding(wEncoding.getText());
jsometa.setOutputValue(wOutputValue.getText());
jsometa.setOperationType(JsonOutputMeta.operationDescType.get(wOperation.getText()));
jsometa.setCreateParentFolder(wCreateParentFolder.getSelection());
jsometa.setFileName(wFilename.getText());
jsometa.setExtension(wExtension.getText());
jsometa.setFileAppended(wAppend.getSelection());
jsometa.setDateInFilename(wAddDate.getSelection());
jsometa.setTimeInFilename(wAddTime.getSelection());
jsometa.setEncoding(wEncoding.getText());
jsometa.setAddToResult(wAddToResult.getSelection());
jsometa.setDoNotOpenNewFileInit(wDoNotOpenNewFileInit.getSelection());
int nrFields = wFields.nrNonEmpty();
input.getOutputFields().clear();
for (int i = 0; i < nrFields; i++) {
JsonOutputField field = new JsonOutputField();
TableItem item = wFields.getNonEmpty(i);
field.setFieldName(item.getText(1));
field.setElementName(item.getText(2));
jsometa.getOutputFields().add(field);
}
}
  /** Validates the transform name, copies the dialog values into the meta, and closes. */
  private void ok() {
    if (Utils.isEmpty(wTransformName.getText())) {
      // No transform name entered: keep the dialog open.
      return;
    }

    transformName = wTransformName.getText(); // return value
    getInfo(input);
    dispose();
  }
private void get() {
try {
IRowMeta r = pipelineMeta.getPrevTransformFields(variables, transformName);
if (r != null) {
BaseTransformDialog.getFieldsFromPrevious(
r,
wFields,
1,
new int[] {1, 2},
new int[] {3},
5,
6,
(tableItem, v) -> {
if (v.isNumber() && v.getLength() > 0) {
int le = v.getLength();
int pr = v.getPrecision();
if (v.getPrecision() <= 0) {
pr = 0;
}
String mask = " ";
for (int m = 0; m < le - pr; m++) {
mask += "0";
}
if (pr > 0) {
mask += ".";
}
for (int m = 0; m < pr; m++) {
mask += "0";
}
tableItem.setText(4, mask);
}
return true;
});
}
} catch (HopException ke) {
new ErrorDialog(
shell,
BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Title"),
BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Message"),
ke);
}
}
private void updateOperation() {
String opType = wOperation.getText();
boolean activeFile = !opType.equals(JsonOutputMeta.OPERATION_TYPE_OUTPUT_VALUE);
wlFilename.setEnabled(activeFile);
wFilename.setEnabled(activeFile);
wbFilename.setEnabled(activeFile);
wlExtension.setEnabled(activeFile);
wExtension.setEnabled(activeFile);
wlEncoding.setEnabled(activeFile);
wEncoding.setEnabled(activeFile);
wlAppend.setEnabled(activeFile);
wAppend.setEnabled(activeFile);
wlCreateParentFolder.setEnabled(activeFile);
wCreateParentFolder.setEnabled(activeFile);
wlDoNotOpenNewFileInit.setEnabled(activeFile);
wDoNotOpenNewFileInit.setEnabled(activeFile);
wlAddDate.setEnabled(activeFile);
wAddDate.setEnabled(activeFile);
wlAddTime.setEnabled(activeFile);
wAddTime.setEnabled(activeFile);
wlAddToResult.setEnabled(activeFile);
wAddToResult.setEnabled(activeFile);
wbShowFiles.setEnabled(activeFile);
boolean activeOutputValue =
!Objects.equals(wOperation.getText(), JsonOutputMeta.OPERATION_TYPE_WRITE_TO_FILE);
wlOutputValue.setEnabled(activeOutputValue);
wOutputValue.setEnabled(activeOutputValue);
}
}
|
googleapis/google-cloud-java | 35,968 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RegionInstanceGroupManagerUpdateInstanceConfigReq.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* RegionInstanceGroupManagers.updatePerInstanceConfigs
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq}
*/
public final class RegionInstanceGroupManagerUpdateInstanceConfigReq
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
RegionInstanceGroupManagerUpdateInstanceConfigReqOrBuilder {
private static final long serialVersionUID = 0L;
  // Use RegionInstanceGroupManagerUpdateInstanceConfigReq.newBuilder() to construct.
  /** Builder-based constructor; message state is taken from the supplied builder. */
  private RegionInstanceGroupManagerUpdateInstanceConfigReq(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
private RegionInstanceGroupManagerUpdateInstanceConfigReq() {
perInstanceConfigs_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RegionInstanceGroupManagerUpdateInstanceConfigReq();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.class,
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.Builder
.class);
}
public static final int PER_INSTANCE_CONFIGS_FIELD_NUMBER = 526265001;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_;
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> getPerInstanceConfigsList() {
return perInstanceConfigs_;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
getPerInstanceConfigsOrBuilderList() {
return perInstanceConfigs_;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public int getPerInstanceConfigsCount() {
return perInstanceConfigs_.size();
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) {
return perInstanceConfigs_.get(index);
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder(
int index) {
return perInstanceConfigs_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < perInstanceConfigs_.size(); i++) {
output.writeMessage(526265001, perInstanceConfigs_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < perInstanceConfigs_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
526265001, perInstanceConfigs_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq other =
(com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq) obj;
if (!getPerInstanceConfigsList().equals(other.getPerInstanceConfigsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getPerInstanceConfigsCount() > 0) {
hash = (37 * hash) + PER_INSTANCE_CONFIGS_FIELD_NUMBER;
hash = (53 * hash) + getPerInstanceConfigsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* RegionInstanceGroupManagers.updatePerInstanceConfigs
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReqOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.class,
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.Builder
.class);
}
// Construct using
// com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (perInstanceConfigsBuilder_ == null) {
perInstanceConfigs_ = java.util.Collections.emptyList();
} else {
perInstanceConfigs_ = null;
perInstanceConfigsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq build() {
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
buildPartial() {
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq result =
new com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq result) {
if (perInstanceConfigsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
perInstanceConfigs_ = java.util.Collections.unmodifiableList(perInstanceConfigs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.perInstanceConfigs_ = perInstanceConfigs_;
} else {
result.perInstanceConfigs_ = perInstanceConfigsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq) {
return mergeFrom(
(com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq other) {
if (other
== com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
.getDefaultInstance()) return this;
if (perInstanceConfigsBuilder_ == null) {
if (!other.perInstanceConfigs_.isEmpty()) {
if (perInstanceConfigs_.isEmpty()) {
perInstanceConfigs_ = other.perInstanceConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.addAll(other.perInstanceConfigs_);
}
onChanged();
}
} else {
if (!other.perInstanceConfigs_.isEmpty()) {
if (perInstanceConfigsBuilder_.isEmpty()) {
perInstanceConfigsBuilder_.dispose();
perInstanceConfigsBuilder_ = null;
perInstanceConfigs_ = other.perInstanceConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
perInstanceConfigsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPerInstanceConfigsFieldBuilder()
: null;
} else {
perInstanceConfigsBuilder_.addAllMessages(other.perInstanceConfigs_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case -84847286:
{
com.google.cloud.compute.v1.PerInstanceConfig m =
input.readMessage(
com.google.cloud.compute.v1.PerInstanceConfig.parser(), extensionRegistry);
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(m);
} else {
perInstanceConfigsBuilder_.addMessage(m);
}
break;
} // case -84847286
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_ =
java.util.Collections.emptyList();
private void ensurePerInstanceConfigsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
perInstanceConfigs_ =
new java.util.ArrayList<com.google.cloud.compute.v1.PerInstanceConfig>(
perInstanceConfigs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.PerInstanceConfig,
com.google.cloud.compute.v1.PerInstanceConfig.Builder,
com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
perInstanceConfigsBuilder_;
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig>
getPerInstanceConfigsList() {
if (perInstanceConfigsBuilder_ == null) {
return java.util.Collections.unmodifiableList(perInstanceConfigs_);
} else {
return perInstanceConfigsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public int getPerInstanceConfigsCount() {
if (perInstanceConfigsBuilder_ == null) {
return perInstanceConfigs_.size();
} else {
return perInstanceConfigsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) {
if (perInstanceConfigsBuilder_ == null) {
return perInstanceConfigs_.get(index);
} else {
return perInstanceConfigsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder setPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig value) {
if (perInstanceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.set(index, value);
onChanged();
} else {
perInstanceConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder setPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.set(index, builderForValue.build());
onChanged();
} else {
perInstanceConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(com.google.cloud.compute.v1.PerInstanceConfig value) {
if (perInstanceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(value);
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig value) {
if (perInstanceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(index, value);
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(
com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(builderForValue.build());
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(index, builderForValue.build());
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addAllPerInstanceConfigs(
java.lang.Iterable<? extends com.google.cloud.compute.v1.PerInstanceConfig> values) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, perInstanceConfigs_);
onChanged();
} else {
perInstanceConfigsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder clearPerInstanceConfigs() {
if (perInstanceConfigsBuilder_ == null) {
perInstanceConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
perInstanceConfigsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder removePerInstanceConfigs(int index) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.remove(index);
onChanged();
} else {
perInstanceConfigsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig.Builder getPerInstanceConfigsBuilder(
int index) {
return getPerInstanceConfigsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder(
int index) {
if (perInstanceConfigsBuilder_ == null) {
return perInstanceConfigs_.get(index);
} else {
return perInstanceConfigsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
getPerInstanceConfigsOrBuilderList() {
if (perInstanceConfigsBuilder_ != null) {
return perInstanceConfigsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(perInstanceConfigs_);
}
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder() {
return getPerInstanceConfigsFieldBuilder()
.addBuilder(com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder(
int index) {
return getPerInstanceConfigsFieldBuilder()
.addBuilder(index, com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of per-instance configurations to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig.Builder>
getPerInstanceConfigsBuilderList() {
return getPerInstanceConfigsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.PerInstanceConfig,
com.google.cloud.compute.v1.PerInstanceConfig.Builder,
com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
getPerInstanceConfigsFieldBuilder() {
if (perInstanceConfigsBuilder_ == null) {
perInstanceConfigsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.PerInstanceConfig,
com.google.cloud.compute.v1.PerInstanceConfig.Builder,
com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>(
perInstanceConfigs_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
perInstanceConfigs_ = null;
}
return perInstanceConfigsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
private static final com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq();
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RegionInstanceGroupManagerUpdateInstanceConfigReq>
PARSER =
new com.google.protobuf.AbstractParser<
RegionInstanceGroupManagerUpdateInstanceConfigReq>() {
@java.lang.Override
public RegionInstanceGroupManagerUpdateInstanceConfigReq parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException()
.setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<RegionInstanceGroupManagerUpdateInstanceConfigReq>
parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RegionInstanceGroupManagerUpdateInstanceConfigReq>
getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,975 | java-migrationcenter/proto-google-cloud-migrationcenter-v1/src/main/java/com/google/cloud/migrationcenter/v1/MachinePreferences.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/migrationcenter/v1/migrationcenter.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.migrationcenter.v1;
/**
*
*
* <pre>
* The type of machines to consider when calculating virtual machine migration
* insights and recommendations.
* Not all machine types are available in all zones and regions.
* </pre>
*
* Protobuf type {@code google.cloud.migrationcenter.v1.MachinePreferences}
*/
// NOTE(review): protoc-generated code (file header says DO NOT EDIT). The comments
// added below are reader orientation only — any hand edit here is lost on regeneration;
// behavioral changes belong in the .proto definition.
public final class MachinePreferences extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.migrationcenter.v1.MachinePreferences)
    MachinePreferencesOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use MachinePreferences.newBuilder() to construct.
  private MachinePreferences(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance; repeated field starts empty.
  private MachinePreferences() {
    allowedMachineSeries_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MachinePreferences();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.migrationcenter.v1.MigrationCenterProto
        .internal_static_google_cloud_migrationcenter_v1_MachinePreferences_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.migrationcenter.v1.MigrationCenterProto
        .internal_static_google_cloud_migrationcenter_v1_MachinePreferences_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.migrationcenter.v1.MachinePreferences.class,
            com.google.cloud.migrationcenter.v1.MachinePreferences.Builder.class);
  }
  public static final int ALLOWED_MACHINE_SERIES_FIELD_NUMBER = 1;
  // Backing list for the sole field `allowed_machine_series` (field number 1).
  // Immutable once the message is built; an empty list means "no restriction".
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.migrationcenter.v1.MachineSeries> allowedMachineSeries_;
  /**
   *
   *
   * <pre>
   * Compute Engine machine series to consider for insights and recommendations.
   * If empty, no restriction is applied on the machine series.
   * </pre>
   *
   * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.migrationcenter.v1.MachineSeries>
      getAllowedMachineSeriesList() {
    return allowedMachineSeries_;
  }
  /**
   *
   *
   * <pre>
   * Compute Engine machine series to consider for insights and recommendations.
   * If empty, no restriction is applied on the machine series.
   * </pre>
   *
   * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.migrationcenter.v1.MachineSeriesOrBuilder>
      getAllowedMachineSeriesOrBuilderList() {
    return allowedMachineSeries_;
  }
  /**
   *
   *
   * <pre>
   * Compute Engine machine series to consider for insights and recommendations.
   * If empty, no restriction is applied on the machine series.
   * </pre>
   *
   * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
   * </code>
   */
  @java.lang.Override
  public int getAllowedMachineSeriesCount() {
    return allowedMachineSeries_.size();
  }
  /**
   *
   *
   * <pre>
   * Compute Engine machine series to consider for insights and recommendations.
   * If empty, no restriction is applied on the machine series.
   * </pre>
   *
   * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.migrationcenter.v1.MachineSeries getAllowedMachineSeries(int index) {
    return allowedMachineSeries_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Compute Engine machine series to consider for insights and recommendations.
   * If empty, no restriction is applied on the machine series.
   * </pre>
   *
   * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.migrationcenter.v1.MachineSeriesOrBuilder
      getAllowedMachineSeriesOrBuilder(int index) {
    return allowedMachineSeries_.get(index);
  }
  // Memoized initialization check: -1 = not computed, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // ---- wire-format serialization ----
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < allowedMachineSeries_.size(); i++) {
      output.writeMessage(1, allowedMachineSeries_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < allowedMachineSeries_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, allowedMachineSeries_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value semantics: two messages are equal iff field values and unknown fields match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.migrationcenter.v1.MachinePreferences)) {
      return super.equals(obj);
    }
    com.google.cloud.migrationcenter.v1.MachinePreferences other =
        (com.google.cloud.migrationcenter.v1.MachinePreferences) obj;
    if (!getAllowedMachineSeriesList().equals(other.getAllowedMachineSeriesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAllowedMachineSeriesCount() > 0) {
      hash = (37 * hash) + ALLOWED_MACHINE_SERIES_FIELD_NUMBER;
      hash = (53 * hash) + getAllowedMachineSeriesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- standard generated parse entry points (ByteBuffer / ByteString / byte[] /
  //      InputStream / CodedInputStream, each with and without an extension registry) ----
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // ---- builder plumbing ----
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.migrationcenter.v1.MachinePreferences prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The type of machines to consider when calculating virtual machine migration
   * insights and recommendations.
   * Not all machine types are available in all zones and regions.
   * </pre>
   *
   * Protobuf type {@code google.cloud.migrationcenter.v1.MachinePreferences}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.migrationcenter.v1.MachinePreferences)
      com.google.cloud.migrationcenter.v1.MachinePreferencesOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_MachinePreferences_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_MachinePreferences_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.migrationcenter.v1.MachinePreferences.class,
              com.google.cloud.migrationcenter.v1.MachinePreferences.Builder.class);
    }
    // Construct using com.google.cloud.migrationcenter.v1.MachinePreferences.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (allowedMachineSeriesBuilder_ == null) {
        allowedMachineSeries_ = java.util.Collections.emptyList();
      } else {
        allowedMachineSeries_ = null;
        allowedMachineSeriesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_MachinePreferences_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.MachinePreferences getDefaultInstanceForType() {
      return com.google.cloud.migrationcenter.v1.MachinePreferences.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.MachinePreferences build() {
      com.google.cloud.migrationcenter.v1.MachinePreferences result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.MachinePreferences buildPartial() {
      com.google.cloud.migrationcenter.v1.MachinePreferences result =
          new com.google.cloud.migrationcenter.v1.MachinePreferences(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes the repeated field into the built message (unmodifiable snapshot when the
    // builder owned a mutable copy; otherwise delegates to the repeated-field builder).
    private void buildPartialRepeatedFields(
        com.google.cloud.migrationcenter.v1.MachinePreferences result) {
      if (allowedMachineSeriesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          allowedMachineSeries_ = java.util.Collections.unmodifiableList(allowedMachineSeries_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.allowedMachineSeries_ = allowedMachineSeries_;
      } else {
        result.allowedMachineSeries_ = allowedMachineSeriesBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.migrationcenter.v1.MachinePreferences result) {
      int from_bitField0_ = bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.migrationcenter.v1.MachinePreferences) {
        return mergeFrom((com.google.cloud.migrationcenter.v1.MachinePreferences) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.migrationcenter.v1.MachinePreferences other) {
      if (other == com.google.cloud.migrationcenter.v1.MachinePreferences.getDefaultInstance())
        return this;
      if (allowedMachineSeriesBuilder_ == null) {
        if (!other.allowedMachineSeries_.isEmpty()) {
          if (allowedMachineSeries_.isEmpty()) {
            allowedMachineSeries_ = other.allowedMachineSeries_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAllowedMachineSeriesIsMutable();
            allowedMachineSeries_.addAll(other.allowedMachineSeries_);
          }
          onChanged();
        }
      } else {
        if (!other.allowedMachineSeries_.isEmpty()) {
          if (allowedMachineSeriesBuilder_.isEmpty()) {
            allowedMachineSeriesBuilder_.dispose();
            allowedMachineSeriesBuilder_ = null;
            allowedMachineSeries_ = other.allowedMachineSeries_;
            bitField0_ = (bitField0_ & ~0x00000001);
            allowedMachineSeriesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAllowedMachineSeriesFieldBuilder()
                    : null;
          } else {
            allowedMachineSeriesBuilder_.addAllMessages(other.allowedMachineSeries_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.migrationcenter.v1.MachineSeries m =
                    input.readMessage(
                        com.google.cloud.migrationcenter.v1.MachineSeries.parser(),
                        extensionRegistry);
                if (allowedMachineSeriesBuilder_ == null) {
                  ensureAllowedMachineSeriesIsMutable();
                  allowedMachineSeries_.add(m);
                } else {
                  allowedMachineSeriesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0 of bitField0_ tracks whether allowedMachineSeries_ is a private mutable copy.
    private int bitField0_;
    private java.util.List<com.google.cloud.migrationcenter.v1.MachineSeries>
        allowedMachineSeries_ = java.util.Collections.emptyList();
    private void ensureAllowedMachineSeriesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        allowedMachineSeries_ =
            new java.util.ArrayList<com.google.cloud.migrationcenter.v1.MachineSeries>(
                allowedMachineSeries_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.migrationcenter.v1.MachineSeries,
            com.google.cloud.migrationcenter.v1.MachineSeries.Builder,
            com.google.cloud.migrationcenter.v1.MachineSeriesOrBuilder>
        allowedMachineSeriesBuilder_;
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public java.util.List<com.google.cloud.migrationcenter.v1.MachineSeries>
        getAllowedMachineSeriesList() {
      if (allowedMachineSeriesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(allowedMachineSeries_);
      } else {
        return allowedMachineSeriesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public int getAllowedMachineSeriesCount() {
      if (allowedMachineSeriesBuilder_ == null) {
        return allowedMachineSeries_.size();
      } else {
        return allowedMachineSeriesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public com.google.cloud.migrationcenter.v1.MachineSeries getAllowedMachineSeries(int index) {
      if (allowedMachineSeriesBuilder_ == null) {
        return allowedMachineSeries_.get(index);
      } else {
        return allowedMachineSeriesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder setAllowedMachineSeries(
        int index, com.google.cloud.migrationcenter.v1.MachineSeries value) {
      if (allowedMachineSeriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAllowedMachineSeriesIsMutable();
        allowedMachineSeries_.set(index, value);
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder setAllowedMachineSeries(
        int index, com.google.cloud.migrationcenter.v1.MachineSeries.Builder builderForValue) {
      if (allowedMachineSeriesBuilder_ == null) {
        ensureAllowedMachineSeriesIsMutable();
        allowedMachineSeries_.set(index, builderForValue.build());
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder addAllowedMachineSeries(
        com.google.cloud.migrationcenter.v1.MachineSeries value) {
      if (allowedMachineSeriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAllowedMachineSeriesIsMutable();
        allowedMachineSeries_.add(value);
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder addAllowedMachineSeries(
        int index, com.google.cloud.migrationcenter.v1.MachineSeries value) {
      if (allowedMachineSeriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAllowedMachineSeriesIsMutable();
        allowedMachineSeries_.add(index, value);
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder addAllowedMachineSeries(
        com.google.cloud.migrationcenter.v1.MachineSeries.Builder builderForValue) {
      if (allowedMachineSeriesBuilder_ == null) {
        ensureAllowedMachineSeriesIsMutable();
        allowedMachineSeries_.add(builderForValue.build());
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder addAllowedMachineSeries(
        int index, com.google.cloud.migrationcenter.v1.MachineSeries.Builder builderForValue) {
      if (allowedMachineSeriesBuilder_ == null) {
        ensureAllowedMachineSeriesIsMutable();
        allowedMachineSeries_.add(index, builderForValue.build());
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder addAllAllowedMachineSeries(
        java.lang.Iterable<? extends com.google.cloud.migrationcenter.v1.MachineSeries> values) {
      if (allowedMachineSeriesBuilder_ == null) {
        ensureAllowedMachineSeriesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, allowedMachineSeries_);
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder clearAllowedMachineSeries() {
      if (allowedMachineSeriesBuilder_ == null) {
        allowedMachineSeries_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public Builder removeAllowedMachineSeries(int index) {
      if (allowedMachineSeriesBuilder_ == null) {
        ensureAllowedMachineSeriesIsMutable();
        allowedMachineSeries_.remove(index);
        onChanged();
      } else {
        allowedMachineSeriesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public com.google.cloud.migrationcenter.v1.MachineSeries.Builder getAllowedMachineSeriesBuilder(
        int index) {
      return getAllowedMachineSeriesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public com.google.cloud.migrationcenter.v1.MachineSeriesOrBuilder
        getAllowedMachineSeriesOrBuilder(int index) {
      if (allowedMachineSeriesBuilder_ == null) {
        return allowedMachineSeries_.get(index);
      } else {
        return allowedMachineSeriesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public java.util.List<? extends com.google.cloud.migrationcenter.v1.MachineSeriesOrBuilder>
        getAllowedMachineSeriesOrBuilderList() {
      if (allowedMachineSeriesBuilder_ != null) {
        return allowedMachineSeriesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(allowedMachineSeries_);
      }
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public com.google.cloud.migrationcenter.v1.MachineSeries.Builder
        addAllowedMachineSeriesBuilder() {
      return getAllowedMachineSeriesFieldBuilder()
          .addBuilder(com.google.cloud.migrationcenter.v1.MachineSeries.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public com.google.cloud.migrationcenter.v1.MachineSeries.Builder addAllowedMachineSeriesBuilder(
        int index) {
      return getAllowedMachineSeriesFieldBuilder()
          .addBuilder(
              index, com.google.cloud.migrationcenter.v1.MachineSeries.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Compute Engine machine series to consider for insights and recommendations.
     * If empty, no restriction is applied on the machine series.
     * </pre>
     *
     * <code>repeated .google.cloud.migrationcenter.v1.MachineSeries allowed_machine_series = 1;
     * </code>
     */
    public java.util.List<com.google.cloud.migrationcenter.v1.MachineSeries.Builder>
        getAllowedMachineSeriesBuilderList() {
      return getAllowedMachineSeriesFieldBuilder().getBuilderList();
    }
    // Lazily created; once it exists it owns the field contents and allowedMachineSeries_
    // is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.migrationcenter.v1.MachineSeries,
            com.google.cloud.migrationcenter.v1.MachineSeries.Builder,
            com.google.cloud.migrationcenter.v1.MachineSeriesOrBuilder>
        getAllowedMachineSeriesFieldBuilder() {
      if (allowedMachineSeriesBuilder_ == null) {
        allowedMachineSeriesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.migrationcenter.v1.MachineSeries,
                com.google.cloud.migrationcenter.v1.MachineSeries.Builder,
                com.google.cloud.migrationcenter.v1.MachineSeriesOrBuilder>(
                allowedMachineSeries_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        allowedMachineSeries_ = null;
      }
      return allowedMachineSeriesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.migrationcenter.v1.MachinePreferences)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.migrationcenter.v1.MachinePreferences)
  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.migrationcenter.v1.MachinePreferences DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.migrationcenter.v1.MachinePreferences();
  }
  public static com.google.cloud.migrationcenter.v1.MachinePreferences getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom and preserves partial results on failure.
  private static final com.google.protobuf.Parser<MachinePreferences> PARSER =
      new com.google.protobuf.AbstractParser<MachinePreferences>() {
        @java.lang.Override
        public MachinePreferences parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<MachinePreferences> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<MachinePreferences> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.migrationcenter.v1.MachinePreferences getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
hibernate/hibernate-orm | 34,286 | tooling/metamodel-generator/src/main/java/org/hibernate/processor/util/TypeUtils.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.processor.util;
import jakarta.persistence.AccessType;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.processor.Context;
import org.hibernate.processor.MetaModelGenerationException;
import org.hibernate.processor.model.Metamodel;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Name;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.TypeVariable;
import javax.lang.model.type.WildcardType;
import javax.lang.model.util.ElementFilter;
import javax.lang.model.util.Elements;
import javax.lang.model.util.SimpleTypeVisitor8;
import javax.tools.Diagnostic;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;
import static java.beans.Introspector.decapitalize;
import static java.util.stream.Stream.concat;
import static org.hibernate.internal.util.StringHelper.split;
import static org.hibernate.processor.util.AccessTypeInformation.DEFAULT_ACCESS_TYPE;
import static org.hibernate.processor.util.Constants.ACCESS;
import static org.hibernate.processor.util.Constants.ELEMENT_COLLECTION;
import static org.hibernate.processor.util.Constants.EMBEDDABLE;
import static org.hibernate.processor.util.Constants.EMBEDDED_ID;
import static org.hibernate.processor.util.Constants.ENTITY;
import static org.hibernate.processor.util.Constants.ID;
import static org.hibernate.processor.util.Constants.JAVA_OBJECT;
import static org.hibernate.processor.util.Constants.MANY_TO_MANY;
import static org.hibernate.processor.util.Constants.MANY_TO_ONE;
import static org.hibernate.processor.util.Constants.MAP;
import static org.hibernate.processor.util.Constants.MAPPED_SUPERCLASS;
import static org.hibernate.processor.util.Constants.ONE_TO_MANY;
import static org.hibernate.processor.util.Constants.ONE_TO_ONE;
import static org.hibernate.processor.util.NullnessUtil.castNonNull;
import static org.hibernate.processor.util.StringUtil.isProperty;
import static org.hibernate.processor.util.StringUtil.removeDollar;
/**
* Utility class.
*
* @author Max Andersen
* @author Hardy Ferentschik
* @author Emmanuel Bernard
*/
public final class TypeUtils {
	public static final String DEFAULT_ANNOTATION_PARAMETER_NAME = "value";
	// Maps each primitive TypeKind to the simple name of its boxed wrapper type
	// (metamodel attribute types must be reference types).
	private static final Map<TypeKind, String> PRIMITIVE_WRAPPERS = new HashMap<>();
	// Maps each primitive TypeKind to the primitive keyword itself
	// (used when rendering array component types, e.g. "int[]").
	private static final Map<TypeKind, String> PRIMITIVES = new HashMap<>();
	static {
		PRIMITIVE_WRAPPERS.put( TypeKind.CHAR, "Character" );
		PRIMITIVE_WRAPPERS.put( TypeKind.BYTE, "Byte" );
		PRIMITIVE_WRAPPERS.put( TypeKind.SHORT, "Short" );
		PRIMITIVE_WRAPPERS.put( TypeKind.INT, "Integer" );
		PRIMITIVE_WRAPPERS.put( TypeKind.LONG, "Long" );
		PRIMITIVE_WRAPPERS.put( TypeKind.BOOLEAN, "Boolean" );
		PRIMITIVE_WRAPPERS.put( TypeKind.FLOAT, "Float" );
		PRIMITIVE_WRAPPERS.put( TypeKind.DOUBLE, "Double" );
		PRIMITIVES.put( TypeKind.CHAR, "char" );
		PRIMITIVES.put( TypeKind.BYTE, "byte" );
		PRIMITIVES.put( TypeKind.SHORT, "short" );
		PRIMITIVES.put( TypeKind.INT, "int" );
		PRIMITIVES.put( TypeKind.LONG, "long" );
		PRIMITIVES.put( TypeKind.BOOLEAN, "boolean" );
		PRIMITIVES.put( TypeKind.FLOAT, "float" );
		PRIMITIVES.put( TypeKind.DOUBLE, "double" );
	}
	// Utility class — no instances.
	private TypeUtils() {
	}
public static String toTypeString(TypeMirror type) {
return type.getKind().isPrimitive()
? castNonNull( PRIMITIVE_WRAPPERS.get( type.getKind() ) )
: TypeRenderingVisitor.toString( type );
}
	/**
	 * Renders the given array type as a Java source string, for example
	 * {@code int[]}.
	 *
	 * @param type the array type to render
	 * @param context the processing context
	 * @return the rendered array type string, ending in {@code []}
	 */
	public static String toArrayTypeString(ArrayType type, Context context) {
		final TypeMirror componentType = type.getComponentType();
		if ( componentType.getKind().isPrimitive() ) {
			return PRIMITIVES.get( componentType.getKind() ) + "[]";
		}
		else {
			// When an ArrayType is annotated with an annotation which uses TYPE_USE targets,
			// we cannot simply take the TypeMirror returned by #getComponentType because it
			// itself is an AnnotatedType.
			//
			// The simplest approach here to get the TypeMirror for both ArrayType use cases
			// is to use the visitor to retrieve the underlying TypeMirror.
			final TypeMirror component = componentType.accept(
					new SimpleTypeVisitor8<TypeMirror, Void>() {
						@Override
						protected TypeMirror defaultAction(TypeMirror e, Void aVoid) {
							return e;
						}
					},
					null
			);
			return extractClosestRealTypeAsString( component, context ) + "[]";
		}
	}
public static @Nullable TypeElement getSuperclassTypeElement(TypeElement element) {
final TypeMirror superclass = element.getSuperclass();
//superclass of Object is of NoType which returns some other kind
if ( superclass.getKind() == TypeKind.DECLARED ) {
final DeclaredType declaredType = (DeclaredType) superclass;
return (TypeElement) declaredType.asElement();
}
else {
return null;
}
}
public static String extractClosestRealTypeAsString(TypeMirror type, Context context) {
final TypeMirror mirror = extractClosestRealType( type, context, new HashSet<>() );
return mirror == null ? "?" : mirror.toString();
}
private static @Nullable TypeMirror lowerBound(@Nullable TypeMirror bound) {
return bound == null || bound.getKind() == TypeKind.NULL ? null : bound;
}
private static @Nullable TypeMirror upperBound(@Nullable TypeMirror bound) {
if ( bound !=null && bound.getKind() == TypeKind.DECLARED ) {
final DeclaredType type = (DeclaredType) bound;
return type.asElement().getSimpleName().contentEquals(JAVA_OBJECT) ? null : bound;
}
else {
return null;
}
}
	/**
	 * Maps the given type mirror to the closest legal, denotable type:
	 * type variables and wildcards are replaced by wildcards with simplified
	 * bounds, declared types have their type arguments recursively
	 * simplified, and any other kind of type is erased.
	 *
	 * @param type the type to simplify (may be {@code null})
	 * @param context the processing context (provides {@code Types})
	 * @param beingVisited the type variables currently on the recursion
	 *        stack, used to detect self-referential variables such as
	 *        {@code T extends Comparable<T>}
	 * @return the simplified type, or {@code null} when {@code type} is {@code null}
	 */
	public static @Nullable TypeMirror extractClosestRealType(TypeMirror type, Context context, Set<TypeVariable> beingVisited) {
		if ( type == null ) {
			return null;
		}
		switch ( type.getKind() ) {
			case TYPEVAR:
				final TypeVariable typeVariable = (TypeVariable) type;
				if ( !beingVisited.add( typeVariable ) ) {
					// A self-referential type variable has to be represented as plain wildcard `?`
					return context.getTypeUtils().getWildcardType( null, null );
				}
				else {
					// simplify both bounds, then drop uninformative ones via upperBound()/lowerBound()
					final WildcardType wildcardType = context.getTypeUtils().getWildcardType(
							upperBound( extractClosestRealType( typeVariable.getUpperBound(), context, beingVisited ) ),
							lowerBound( extractClosestRealType( typeVariable.getLowerBound(), context, beingVisited ) )
					);
					beingVisited.remove( typeVariable );
					return wildcardType;
				}
			case WILDCARD:
				final WildcardType wildcardType = (WildcardType) type;
				return context.getTypeUtils().getWildcardType(
						extractClosestRealType( wildcardType.getExtendsBound(), context, beingVisited ),
						extractClosestRealType( wildcardType.getSuperBound(), context, beingVisited )
				);
			case DECLARED:
				// rebuild the declared type with recursively simplified type arguments
				final DeclaredType declaredType = (DeclaredType) type;
				final TypeElement typeElement = (TypeElement) declaredType.asElement();
				return context.getTypeUtils().getDeclaredType(
						typeElement,
						declaredType.getTypeArguments().stream()
								.map( arg -> extractClosestRealType( arg, context, beingVisited ) )
								.toArray( TypeMirror[]::new )
				);
			default:
				// primitives, arrays, etc.: fall back to type erasure
				return context.getTypeUtils().erasure( type );
		}
	}
public static boolean containsAnnotation(Element element, String... annotations) {
assert element != null;
assert annotations != null;
final Set<String> annotationClassNames = Set.of(annotations);
for ( AnnotationMirror mirror : element.getAnnotationMirrors() ) {
if ( annotationClassNames.contains( mirror.getAnnotationType().toString() ) ) {
return true;
}
}
return false;
}
/**
* Returns {@code true} if the provided annotation type is of the same type as the provided class, {@code false} otherwise.
* This method uses the string class names for comparison. See also
* <a href="http://www.retep.org/2009/02/getting-class-values-from-annotations.html">getting-class-values-from-annotations</a>.
*
* @param annotationMirror The annotation mirror
* @param qualifiedName the fully qualified class name to check against
*
* @return {@code true} if the provided annotation type is of the same type as the provided class, {@code false} otherwise.
*/
public static boolean isAnnotationMirrorOfType(AnnotationMirror annotationMirror, String qualifiedName) {
assert annotationMirror != null;
assert qualifiedName != null;
final Element element = annotationMirror.getAnnotationType().asElement();
final TypeElement typeElement = (TypeElement) element;
return typeElement.getQualifiedName().contentEquals( qualifiedName );
}
/**
* Checks whether the {@code Element} hosts the annotation with the given fully qualified class name.
*
* @param element the element to check for the hosted annotation
* @param qualifiedName the fully qualified class name of the annotation to check for
*
* @return the annotation mirror for the specified annotation class from the {@code Element} or {@code null} in case
* the {@code TypeElement} does not host the specified annotation.
*/
public static @Nullable AnnotationMirror getAnnotationMirror(Element element, String qualifiedName) {
assert element != null;
assert qualifiedName != null;
for ( AnnotationMirror mirror : element.getAnnotationMirrors() ) {
if ( isAnnotationMirrorOfType( mirror, qualifiedName ) ) {
return mirror;
}
}
return null;
}
/**
* Checks whether the {@code Element} hosts the annotation (directly or inherited) with the given fully qualified class name.
*
* @param element the element to check for the hosted annotation
* @param qualifiedName the fully qualified class name of the annotation to check for
*
* @return the annotation mirror for the specified annotation class from the {@code Element} or {@code null} in case
* the {@code TypeElement} does not host the specified annotation (directly or inherited).
*/
public static @Nullable AnnotationMirror getInheritedAnnotationMirror(Elements elements, Element element, String qualifiedName) {
assert element != null;
assert qualifiedName != null;
for ( AnnotationMirror mirror : elements.getAllAnnotationMirrors(element) ) {
if ( isAnnotationMirrorOfType( mirror, qualifiedName ) ) {
return mirror;
}
}
return null;
}
public static boolean hasAnnotation(Element element, String qualifiedName) {
return getAnnotationMirror( element, qualifiedName ) != null;
}
public static boolean hasAnnotation(Element element, String... qualifiedNames) {
for ( String qualifiedName : qualifiedNames ) {
if ( hasAnnotation( element, qualifiedName ) ) {
return true;
}
}
return false;
}
public static @Nullable AnnotationValue getAnnotationValue(AnnotationMirror annotationMirror) {
return getAnnotationValue( annotationMirror, DEFAULT_ANNOTATION_PARAMETER_NAME );
}
public static @Nullable AnnotationValue getAnnotationValue(AnnotationMirror annotationMirror, String member) {
assert annotationMirror != null;
assert member != null;
for ( Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry
: annotationMirror.getElementValues().entrySet() ) {
if ( entry.getKey().getSimpleName().contentEquals(member) ) {
return entry.getValue();
}
}
return null;
}
	/**
	 * Determines and caches the access type for the given type, considering
	 * in order: a previously cached resolution, an explicit {@code @Access}
	 * annotation, the placement of the id annotation when this type is the
	 * root of its hierarchy, a default inherited from the superclass
	 * hierarchy, and finally {@code DEFAULT_ACCESS_TYPE}.
	 *
	 * @param searchedElement the type whose access type should be determined
	 * @param context the global execution context, which holds the access-type cache
	 */
	public static void determineAccessTypeForHierarchy(TypeElement searchedElement, Context context) {
		final String qualifiedName = searchedElement.getQualifiedName().toString();
		context.logMessage( Diagnostic.Kind.OTHER, "Determining access type for " + qualifiedName );
		final AccessTypeInformation accessTypeInfo = context.getAccessTypeInfo( qualifiedName );
		if ( accessTypeInfo != null && accessTypeInfo.isAccessTypeResolved() ) {
			// already resolved in an earlier pass; nothing to do
			context.logMessage(
					Diagnostic.Kind.OTHER,
					"AccessType for " + searchedElement + " found in cache: " + accessTypeInfo
			);
		}
		else {
			// check for explicit access type
			final AccessType forcedAccessType = determineAnnotationSpecifiedAccessType( searchedElement) ;
			if ( forcedAccessType != null ) {
				context.logMessage(
						Diagnostic.Kind.OTHER,
						"Explicit access type on " + searchedElement + ":" + forcedAccessType
				);
				final AccessTypeInformation newAccessTypeInfo =
						new AccessTypeInformation( qualifiedName, forcedAccessType, null );
				context.addAccessTypeInformation( qualifiedName, newAccessTypeInfo );
				// propagate to embedded attributes, which inherit the owner's access type
				updateEmbeddableAccessType( searchedElement, context, forcedAccessType );
			}
			else {
				// need to find the default access type for this class
				// let's check first if this entity is the root of the class hierarchy and defines an id. If so the
				// placement of the id annotation determines the access type
				final AccessType defaultAccessType = getAccessTypeInCaseElementIsRoot( searchedElement, context );
				if ( defaultAccessType != null ) {
					final AccessTypeInformation newAccessTypeInfo =
							new AccessTypeInformation(qualifiedName, null, defaultAccessType);
					context.addAccessTypeInformation( qualifiedName, newAccessTypeInfo );
					updateEmbeddableAccessType( searchedElement, context, defaultAccessType );
					setDefaultAccessTypeForMappedSuperclassesInHierarchy( searchedElement, defaultAccessType, context );
				}
				else {
					// if we end up here we need to recursively look for superclasses
					AccessType newDefaultAccessType = getDefaultAccessForHierarchy( searchedElement, context );
					if ( newDefaultAccessType == null ) {
						newDefaultAccessType = DEFAULT_ACCESS_TYPE;
					}
					final AccessTypeInformation newAccessTypeInfo =
							new AccessTypeInformation( qualifiedName, null, newDefaultAccessType );
					context.addAccessTypeInformation( qualifiedName, newAccessTypeInfo );
					updateEmbeddableAccessType( searchedElement, context, newDefaultAccessType );
				}
			}
		}
	}
public static TypeMirror getCollectionElementType(
DeclaredType type, String returnTypeName, @Nullable String explicitTargetEntityName, Context context) {
if ( explicitTargetEntityName != null ) {
return context.getElementUtils().getTypeElement( explicitTargetEntityName ).asType();
}
else {
final List<? extends TypeMirror> typeArguments = type.getTypeArguments();
if ( typeArguments.isEmpty() ) {
throw new MetaModelGenerationException( "Unable to determine collection type" );
}
else if ( MAP.equals( returnTypeName ) ) {
return typeArguments.get( 1 );
}
else {
return typeArguments.get( 0 );
}
}
}
private static void updateEmbeddableAccessType(TypeElement element, Context context, AccessType defaultAccessType) {
for ( Element field : ElementFilter.fieldsIn( element.getEnclosedElements() ) ) {
updateEmbeddableAccessTypeForMember( context, defaultAccessType, field );
}
for ( Element method : ElementFilter.methodsIn( element.getEnclosedElements() ) ) {
updateEmbeddableAccessTypeForMember( context, defaultAccessType, method );
}
}
private static void updateEmbeddableAccessTypeForMember(Context context, AccessType defaultAccessType, Element member) {
final @Nullable TypeElement embedded = member.asType().accept( new EmbeddedAttributeVisitor( context ), member );
if ( embedded != null ) {
updateEmbeddableAccessType( context, defaultAccessType, embedded );
}
}
	/**
	 * Records the access type of the given embeddable class: an explicit
	 * {@code @Access} annotation on the embeddable wins, otherwise previously
	 * cached information is used. When the class has not been seen before,
	 * the owner's default access type is cached and propagation recurses into
	 * the embeddable's own members and superclass.
	 */
	private static void updateEmbeddableAccessType(Context context, AccessType defaultAccessType, TypeElement embedded) {
		final String embeddedClassName = embedded.getQualifiedName().toString();
		final AccessType forcedAccessType = determineAnnotationSpecifiedAccessType( embedded );
		final AccessTypeInformation accessTypeInfo =
				forcedAccessType != null
						? new AccessTypeInformation( embeddedClassName, null, forcedAccessType )
						: context.getAccessTypeInfo( embeddedClassName );
		if ( accessTypeInfo == null ) {
			// first encounter: cache the inherited default and recurse
			final AccessTypeInformation newAccessTypeInfo =
					new AccessTypeInformation( embeddedClassName, null, defaultAccessType );
			context.addAccessTypeInformation( embeddedClassName, newAccessTypeInfo );
			updateEmbeddableAccessType( embedded, context, defaultAccessType );
			final TypeMirror superclass = embedded.getSuperclass();
			if ( superclass.getKind() == TypeKind.DECLARED ) {
				final DeclaredType declaredType = (DeclaredType) superclass;
				final TypeElement element = (TypeElement) declaredType.asElement();
				updateEmbeddableAccessType( context, defaultAccessType, element );
			}
		}
		else {
			// NOTE(review): when forcedAccessType != null this updates a freshly
			// created AccessTypeInformation that is never added to the cache —
			// presumably intentional (the forced type takes precedence); confirm
			accessTypeInfo.setDefaultAccessType(defaultAccessType);
		}
	}
	/**
	 * Walks up the superclass chain of the given element looking for a
	 * default access type: either one already cached for a superclass, or one
	 * derived from the placement of the id annotation on an entity or mapped
	 * superclass in the hierarchy.
	 *
	 * @return the default access type inherited from the hierarchy, or
	 * {@code null} if none could be determined
	 */
	private static @Nullable AccessType getDefaultAccessForHierarchy(TypeElement element, Context context) {
		AccessType defaultAccessType = null;
		TypeElement superClass = element;
		do {
			superClass = getSuperclassTypeElement( superClass );
			if ( superClass != null ) {
				final String qualifiedName = superClass.getQualifiedName().toString();
				final AccessTypeInformation accessTypeInfo = context.getAccessTypeInfo( qualifiedName );
				if ( accessTypeInfo != null && accessTypeInfo.getDefaultAccessType() != null ) {
					// a superclass was already processed: reuse its default
					return accessTypeInfo.getDefaultAccessType();
				}
				if ( containsAnnotation( superClass, ENTITY, MAPPED_SUPERCLASS ) ) {
					defaultAccessType = getAccessTypeInCaseElementIsRoot( superClass, context );
					if ( defaultAccessType != null ) {
						final AccessTypeInformation newAccessTypeInfo
								= new AccessTypeInformation( qualifiedName, null, defaultAccessType );
						context.addAccessTypeInformation( qualifiedName, newAccessTypeInfo );
						// we found an id within the class hierarchy and determined a default access type
						// there cannot be any super entity classes (otherwise it would be a configuration error)
						// but there might be mapped super classes
						// Also note, that if two different class hierarchies with different access types have a common
						// mapped super class, the access type of the mapped super class will be the one of the last
						// hierarchy processed. The result is not determined which is odd with the spec
						setDefaultAccessTypeForMappedSuperclassesInHierarchy( superClass, defaultAccessType, context );
						// we found a default access type, no need to look further
						break;
					}
					else {
						defaultAccessType = getDefaultAccessForHierarchy( superClass, context );
					}
				}
			}
		}
		while ( superClass != null );
		return defaultAccessType;
	}
private static void setDefaultAccessTypeForMappedSuperclassesInHierarchy(TypeElement element, AccessType defaultAccessType, Context context) {
TypeElement superClass = element;
do {
superClass = getSuperclassTypeElement( superClass );
if ( superClass != null ) {
final String qualifiedName = superClass.getQualifiedName().toString();
if ( containsAnnotation( superClass, MAPPED_SUPERCLASS ) ) {
final AccessType forcedAccessType = determineAnnotationSpecifiedAccessType( superClass );
final AccessTypeInformation accessTypeInfo =
forcedAccessType != null
? new AccessTypeInformation( qualifiedName, null, forcedAccessType )
: new AccessTypeInformation( qualifiedName, null, defaultAccessType );
context.addAccessTypeInformation( qualifiedName, accessTypeInfo );
}
}
}
while ( superClass != null );
}
/**
* Iterates all elements of a type to check whether they contain the id annotation. If so the placement of this
* annotation determines the access type
*
* @param searchedElement the type to be searched
* @param context The global execution context
*
* @return returns the access type of the element annotated with the id annotation. If no element is annotated
* {@code null} is returned.
*/
private static @Nullable AccessType getAccessTypeInCaseElementIsRoot(TypeElement searchedElement, Context context) {
for ( Element subElement : searchedElement.getEnclosedElements() ) {
for ( AnnotationMirror entityAnnotation :
context.getElementUtils().getAllAnnotationMirrors( subElement ) ) {
if ( isIdAnnotation( entityAnnotation ) ) {
return getAccessTypeOfIdAnnotation( subElement );
}
}
}
return null;
}
private static @Nullable AccessType getAccessTypeOfIdAnnotation(Element element) {
return switch ( element.getKind() ) {
case FIELD -> AccessType.FIELD;
case METHOD -> AccessType.PROPERTY;
default -> null;
};
}
private static boolean isIdAnnotation(AnnotationMirror annotationMirror) {
return isAnnotationMirrorOfType( annotationMirror, ID )
|| isAnnotationMirrorOfType( annotationMirror, EMBEDDED_ID );
}
public static @Nullable AccessType determineAnnotationSpecifiedAccessType(Element element) {
final AnnotationMirror mirror = getAnnotationMirror( element, ACCESS );
if ( mirror != null ) {
final AnnotationValue accessType = getAnnotationValue( mirror );
if ( accessType != null ) {
final VariableElement enumValue = (VariableElement) accessType.getValue();
final Name enumValueName = enumValue.getSimpleName();
if ( enumValueName.contentEquals(AccessType.PROPERTY.name()) ) {
return AccessType.PROPERTY;
}
else if ( enumValueName.contentEquals(AccessType.FIELD.name()) ) {
return AccessType.FIELD;
}
}
}
return null;
}
public static ElementKind getElementKindForAccessType(AccessType accessType) {
return accessType == AccessType.FIELD ? ElementKind.FIELD : ElementKind.METHOD;
}
public static String getKeyType(DeclaredType type, Context context) {
final List<? extends TypeMirror> typeArguments = type.getTypeArguments();
if ( typeArguments.isEmpty() ) {
context.logMessage( Diagnostic.Kind.ERROR, "Unable to determine type argument for " + type );
return JAVA_OBJECT;
}
else {
return extractClosestRealTypeAsString( typeArguments.get( 0 ), context );
}
}
public static boolean isClassOrRecordType(Element element) {
final ElementKind kind = element.getKind();
// we want to accept classes and records but not enums,
// and we want to avoid depending on ElementKind.RECORD
return kind.isClass() && kind != ElementKind.ENUM;
}
public static boolean isClassRecordOrInterfaceType(Element element) {
final ElementKind kind = element.getKind();
// we want to accept classes and records but not enums,
// and we want to avoid depending on ElementKind.RECORD
return kind.isClass() && kind != ElementKind.ENUM
|| kind.isInterface() && kind != ElementKind.ANNOTATION_TYPE;
}
public static boolean primitiveClassMatchesKind(Class<?> itemType, TypeKind kind) {
return switch ( kind ) {
case SHORT -> itemType.equals( Short.class );
case INT -> itemType.equals( Integer.class );
case LONG -> itemType.equals( Long.class );
case BOOLEAN -> itemType.equals( Boolean.class );
case FLOAT -> itemType.equals( Float.class );
case DOUBLE -> itemType.equals( Double.class );
case CHAR -> itemType.equals( Character.class );
case BYTE -> itemType.equals( Byte.class );
default -> false;
};
}
	/**
	 * Whether the given method element is a property getter, judged by its
	 * name and its rendered return type.
	 */
	public static boolean isPropertyGetter(ExecutableType executable, Element element) {
		return element.getKind() == ElementKind.METHOD
			&& isProperty( element.getSimpleName().toString(),
					toTypeString( executable.getReturnType() ) );
	}
	/**
	 * Whether the element represents a plural (collection-valued) attribute,
	 * judged by its association annotations.
	 */
	public static boolean isPluralAttribute(Element element) {
		// TODO: should MANY_TO_ANY be on this list?
		return hasAnnotation( element, MANY_TO_MANY, ONE_TO_MANY, ELEMENT_COLLECTION );
	}
public static @Nullable String getFullyQualifiedClassNameOfTargetEntity(
AnnotationMirror mirror, String member) {
final AnnotationValue value = getAnnotationValue( mirror, member);
if ( value != null ) {
final TypeMirror parameterType = (TypeMirror) value.getValue();
if ( parameterType.getKind() != TypeKind.VOID ) {
return parameterType.toString();
}
}
return null;
}
	/**
	 * @param annotations list of annotation mirrors.
	 *
	 * @return target entity class name as string or {@code null} if no targetEntity is here or if equals to void
	 */
	public static @Nullable String getTargetEntity(List<? extends AnnotationMirror> annotations) {
		// the first matching annotation in the list wins
		for ( AnnotationMirror mirror : annotations ) {
			if ( isAnnotationMirrorOfType( mirror, ELEMENT_COLLECTION ) ) {
				// @ElementCollection names its target via 'targetClass'
				return getFullyQualifiedClassNameOfTargetEntity( mirror, "targetClass" );
			}
			else if ( isAnnotationMirrorOfType( mirror, ONE_TO_MANY )
					|| isAnnotationMirrorOfType( mirror, MANY_TO_MANY )
					|| isAnnotationMirrorOfType( mirror, MANY_TO_ONE )
					|| isAnnotationMirrorOfType( mirror, ONE_TO_ONE ) ) {
				// association annotations name their target via 'targetEntity'
				return getFullyQualifiedClassNameOfTargetEntity( mirror, "targetEntity" );
			}
			else if ( isAnnotationMirrorOfType( mirror, "org.hibernate.annotations.TargetEmbeddable") ) {
				return getFullyQualifiedClassNameOfTargetEntity( mirror, "value" );
			}
		}
		return null;
	}
public static String propertyName(Element element) {
switch ( element.getKind() ) {
case FIELD:
return element.getSimpleName().toString();
case METHOD:
final Name name = element.getSimpleName();
if ( name.length() > 3 && name.subSequence( 0, 3 ).equals( "get" ) ) {
return decapitalize( name.subSequence( 3, name.length() ).toString() );
}
else if ( name.length() > 2 && name.subSequence( 0, 2 ).equals( "is" ) ) {
return decapitalize( name.subSequence( 2, name.length() ).toString() );
}
else {
return decapitalize( name.toString() );
}
default:
return element.getSimpleName() + "/* " + element.getKind() + " */";
}
}
public static @Nullable Element findMappedSuperElement(Metamodel entity, Context context) {
final Element element = entity.getElement();
if ( element instanceof TypeElement typeElement ) {
TypeMirror superClass = typeElement.getSuperclass();
//superclass of Object is of NoType which returns some other kind
while ( superClass.getKind() == TypeKind.DECLARED ) {
final DeclaredType declaredType = (DeclaredType) superClass;
final TypeElement superClassElement = (TypeElement) declaredType.asElement();
if ( extendsSuperMetaModel( superClassElement, entity.isMetaComplete(), context ) ) {
return superClassElement;
}
superClass = superClassElement.getSuperclass();
}
}
return null;
}
	/**
	 * Checks whether this metamodel class needs to extend another metamodel class.
	 * This method checks whether the processor has generated a metamodel class for the super class, but it also
	 * allows for the possibility that the metamodel class was generated in a previous compilation. (It could be
	 * part of a separate jar. See also METAGEN-35.)
	 *
	 * @param superClassElement the super class element
	 * @param entityMetaComplete flag indicating if the entity for which the metamodel should be generated is
	 * metamodel complete. If so we cannot use reflection to decide whether we have to add the extends clause
	 * @param context the execution context
	 *
	 * @return {@code true} in case there is super class metamodel to extend from {@code false} otherwise.
	 */
	private static boolean extendsSuperMetaModel(Element superClassElement, boolean entityMetaComplete, Context context) {
		// if we processed the superclass in the same run we definitely need to extend
		final TypeElement typeElement = (TypeElement) superClassElement;
		final String superClassName = typeElement.getQualifiedName().toString();
		return context.containsMetaEntity( superClassName )
			|| context.containsMetaEmbeddable( superClassName )
			// to allow for the case that the metamodel class for the super entity is for example contained in another
			// jar file we use reflection. However, we need to consider the fact that there is xml configuration
			// and annotations should be ignored
			|| !entityMetaComplete && containsAnnotation( superClassElement, ENTITY, MAPPED_SUPERCLASS );
	}
public static boolean implementsInterface(TypeElement type, String interfaceName) {
for ( TypeMirror iface : type.getInterfaces() ) {
if ( iface.getKind() == TypeKind.DECLARED ) {
final DeclaredType declaredType = (DeclaredType) iface;
final TypeElement typeElement = (TypeElement) declaredType.asElement();
if ( typeElement.getQualifiedName().contentEquals( interfaceName )
|| implementsInterface( typeElement, interfaceName ) ) {
return true;
}
}
}
TypeMirror superclass = type.getSuperclass();
if ( superclass != null && superclass.getKind() == TypeKind.DECLARED ) {
final DeclaredType declaredType = (DeclaredType) superclass;
final TypeElement typeElement = (TypeElement) declaredType.asElement();
if ( implementsInterface( typeElement, interfaceName) ) {
return true;
}
}
return false;
}
public static boolean extendsClass(TypeElement type, String className) {
TypeMirror superclass = type.getSuperclass();
while ( superclass != null && superclass.getKind() == TypeKind.DECLARED ) {
final DeclaredType declaredType = (DeclaredType) superclass;
final TypeElement typeElement = (TypeElement) declaredType.asElement();
if ( typeElement.getQualifiedName().contentEquals( className ) ) {
return true;
}
superclass = typeElement.getSuperclass();
}
return false;
}
public static boolean isMemberType(Element element) {
return element.getEnclosingElement() instanceof TypeElement;
}
public static String getGeneratedClassFullyQualifiedName(TypeElement typeElement, boolean jakartaDataStyle) {
final String simpleName = typeElement.getSimpleName().toString();
final Element enclosingElement = typeElement.getEnclosingElement();
return qualifiedName( enclosingElement, jakartaDataStyle )
+ "." + (jakartaDataStyle ? '_' + simpleName : simpleName + '_');
}
private static String qualifiedName(Element enclosingElement, boolean jakartaDataStyle) {
if ( enclosingElement instanceof TypeElement typeElement ) {
return getGeneratedClassFullyQualifiedName( typeElement, jakartaDataStyle );
}
else if ( enclosingElement instanceof PackageElement packageElement ) {
return packageElement.getQualifiedName().toString();
}
else {
throw new MetaModelGenerationException( "Unexpected enclosing element: " + enclosingElement );
}
}
	/**
	 * Determines the fully qualified name of the generated metamodel class
	 * for the given type relative to the given package: each name segment of
	 * the element's qualified name below {@code packageName} is decorated
	 * with an underscore (leading for Jakarta Data style, trailing for JPA
	 * style) after stripping any {@code $} from nested-class names.
	 */
	public static String getGeneratedClassFullyQualifiedName(TypeElement element, String packageName, boolean jakartaDataStyle) {
		final StringBuilder builder = new StringBuilder( packageName );
		final Name qualifiedName = element.getQualifiedName();
		// the part of the qualified name below the given package
		final String tail = qualifiedName.subSequence( builder.length(), qualifiedName.length() ).toString();
		for ( String bit : split( ".", tail ) ) {
			final String part = removeDollar( bit );
			if ( !builder.isEmpty() ) {
				builder.append( "." );
			}
			builder.append( jakartaDataStyle ? '_' + part : part + '_' );
		}
		return builder.toString();
	}
	/**
	 * Visitor which resolves an attribute's type to the corresponding
	 * {@code @Embeddable} type element, or {@code null} when the attribute
	 * is not of an embeddable type.
	 */
	static class EmbeddedAttributeVisitor extends SimpleTypeVisitor8<@Nullable TypeElement, Element> {
		private final Context context;
		EmbeddedAttributeVisitor(Context context) {
			this.context = context;
		}
		@Override
		public @Nullable TypeElement visitDeclared(DeclaredType declaredType, Element element) {
			final TypeElement returnedElement = (TypeElement)
					context.getTypeUtils().asElement( declaredType );
			// only types annotated @Embeddable count as embedded attributes
			return containsAnnotation( castNonNull( returnedElement ), EMBEDDABLE ) ? returnedElement : null;
		}
		@Override
		public @Nullable TypeElement visitExecutable(ExecutableType executable, Element element) {
			if ( element.getKind().equals( ElementKind.METHOD ) ) {
				// for getter methods, inspect the return type instead
				final String string = element.getSimpleName().toString();
				return isProperty( string, toTypeString( executable.getReturnType() ) )
						? executable.getReturnType().accept(this, element)
						: null;
			}
			else {
				return null;
			}
		}
	}
	/**
	 * Whether the given type name is one of the eight Java primitive type names.
	 */
	public static boolean isPrimitive(String paramType) {
		return PRIMITIVE_TYPES.contains( paramType );
	}
	// the names of the eight Java primitive types
	public static final Set<String> PRIMITIVE_TYPES =
			Set.of("boolean", "char", "long", "int", "short", "byte", "double", "float");
public static String resolveTypeName(TypeElement typeElement, Element element, String name) {
final var mirror = resolveTypeMirror( typeElement, element, name );
return mirror == null ? name : mirror.toString();
}
public static @Nullable TypeMirror resolveTypeMirror(TypeElement typeElement, Element element, String name) {
final var mirrorMap = resolveTypeParameters( typeElement.asType(), element, Map.of(), new HashSet<>() );
return mirrorMap == null ? null : mirrorMap.get( name );
}
private static @Nullable Map<String, TypeMirror> resolveTypeParameters(TypeMirror type, Element element, Map<String, TypeMirror> parametersMap, Collection<Element> visited) {
if ( !(type instanceof DeclaredType declaredType
&& declaredType.asElement() instanceof TypeElement typeElement) ) {
return null;
}
if ( !visited.add( typeElement ) ) {
return null;
}
final var generic = typeElement.getTypeParameters();
final var map = new HashMap<String, TypeMirror>();
var typeArguments = declaredType.getTypeArguments();
if ( !(typeArguments.isEmpty() || generic.size() == typeArguments.size()) ) {
return null;
}
for ( var n = 0; n < generic.size(); ++n ) {
final var mirror = typeArguments.isEmpty()
? generic.get( 0 ).getBounds().get( 0 )
: typeArguments.get( n );
final var value = mirror.toString();
map.put( generic.get( n ).asType().toString(), parametersMap.getOrDefault( value, mirror ) );
}
if ( typeElement.equals( element ) ) {
return map;
}
return concat(
Stream.of( typeElement.getSuperclass() ),
typeElement.getInterfaces().stream()
).map( tm -> resolveTypeParameters( tm, element, map, visited ) )
.filter( Objects::nonNull )
.findFirst().orElse( null );
}
}
|
apache/kafka | 36,212 | connect/json/src/main/java/org/apache/kafka/connect/json/JsonConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.json;
import org.apache.kafka.common.cache.Cache;
import org.apache.kafka.common.cache.LRUCache;
import org.apache.kafka.common.cache.SynchronizedCache;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.utils.AppInfoParser;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.connect.components.Versioned;
import org.apache.kafka.connect.data.ConnectSchema;
import org.apache.kafka.connect.data.Date;
import org.apache.kafka.connect.data.Decimal;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.data.Time;
import org.apache.kafka.connect.data.Timestamp;
import org.apache.kafka.connect.errors.DataException;
import org.apache.kafka.connect.storage.Converter;
import org.apache.kafka.connect.storage.ConverterType;
import org.apache.kafka.connect.storage.HeaderConverter;
import org.apache.kafka.connect.storage.StringConverterConfig;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* Implementation of {@link Converter} and {@link HeaderConverter} that uses JSON to store schemas and objects. By
 * default, this converter will serialize Connect keys, values, and headers with schemas, although this can be disabled with
* the {@link JsonConverterConfig#SCHEMAS_ENABLE_CONFIG schemas.enable} configuration option.
* <p>
* This implementation currently does nothing with the topic names or header keys.
*/
public class JsonConverter implements Converter, HeaderConverter, Versioned {
private static final Map<Schema.Type, JsonToConnectTypeConverter> TO_CONNECT_CONVERTERS = new EnumMap<>(Schema.Type.class);
static {
TO_CONNECT_CONVERTERS.put(Schema.Type.BOOLEAN, (schema, value, config) -> value.booleanValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.INT8, (schema, value, config) -> (byte) value.intValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.INT16, (schema, value, config) -> (short) value.intValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.INT32, (schema, value, config) -> value.intValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.INT64, (schema, value, config) -> value.longValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.FLOAT32, (schema, value, config) -> value.floatValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.FLOAT64, (schema, value, config) -> value.doubleValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.BYTES, (schema, value, config) -> {
try {
return value.binaryValue();
} catch (IOException e) {
throw new DataException("Invalid bytes field", e);
}
});
TO_CONNECT_CONVERTERS.put(Schema.Type.STRING, (schema, value, config) -> value.textValue());
TO_CONNECT_CONVERTERS.put(Schema.Type.ARRAY, (schema, value, config) -> {
Schema elemSchema = schema == null ? null : schema.valueSchema();
ArrayList<Object> result = new ArrayList<>();
for (JsonNode elem : value) {
result.add(convertToConnect(elemSchema, elem, config));
}
return result;
});
TO_CONNECT_CONVERTERS.put(Schema.Type.MAP, (schema, value, config) -> {
Schema keySchema = schema == null ? null : schema.keySchema();
Schema valueSchema = schema == null ? null : schema.valueSchema();
// If the map uses strings for keys, it should be encoded in the natural JSON format. If it uses other
// primitive types or a complex type as a key, it will be encoded as a list of pairs. If we don't have a
// schema, we default to encoding in a Map.
Map<Object, Object> result = new HashMap<>();
if (schema == null || keySchema.type() == Schema.Type.STRING) {
if (!value.isObject())
throw new DataException("Maps with string fields should be encoded as JSON objects, but found " + value.getNodeType());
for (Map.Entry<String, JsonNode> entry : value.properties()) {
result.put(entry.getKey(), convertToConnect(valueSchema, entry.getValue(), config));
}
} else {
if (!value.isArray())
throw new DataException("Maps with non-string fields should be encoded as JSON array of tuples, but found " + value.getNodeType());
for (JsonNode entry : value) {
if (!entry.isArray())
throw new DataException("Found invalid map entry instead of array tuple: " + entry.getNodeType());
if (entry.size() != 2)
throw new DataException("Found invalid map entry, expected length 2 but found :" + entry.size());
result.put(convertToConnect(keySchema, entry.get(0), config),
convertToConnect(valueSchema, entry.get(1), config));
}
}
return result;
});
TO_CONNECT_CONVERTERS.put(Schema.Type.STRUCT, (schema, value, config) -> {
if (!value.isObject())
throw new DataException("Structs should be encoded as JSON objects, but found " + value.getNodeType());
// We only have ISchema here but need Schema, so we need to materialize the actual schema. Using ISchema
// avoids having to materialize the schema for non-Struct types but it cannot be avoided for Structs since
// they require a schema to be provided at construction. However, the schema is only a SchemaBuilder during
// translation of schemas to JSON; during the more common translation of data to JSON, the call to schema.schema()
// just returns the schema Object and has no overhead.
Struct result = new Struct(schema.schema());
for (Field field : schema.fields())
result.put(field, convertToConnect(field.schema(), value.get(field.name()), config));
return result;
});
}
// Convert values in Kafka Connect form into/from their logical types. These logical converters are discovered by logical type
// names specified in the field
private static final HashMap<String, LogicalTypeConverter> LOGICAL_CONVERTERS = new HashMap<>();
private static final JsonNodeFactory JSON_NODE_FACTORY = new JsonNodeFactory(true);
static {
LOGICAL_CONVERTERS.put(Decimal.LOGICAL_NAME, new LogicalTypeConverter() {
@Override
public JsonNode toJson(final Schema schema, final Object value, final JsonConverterConfig config) {
if (!(value instanceof BigDecimal decimal))
throw new DataException("Invalid type for Decimal, expected BigDecimal but was " + value.getClass());
return switch (config.decimalFormat()) {
case NUMERIC -> JSON_NODE_FACTORY.numberNode(decimal);
case BASE64 -> JSON_NODE_FACTORY.binaryNode(Decimal.fromLogical(schema, decimal));
};
}
@Override
public Object toConnect(final Schema schema, final JsonNode value) {
if (value.isNumber()) return value.decimalValue();
if (value.isBinary() || value.isTextual()) {
try {
return Decimal.toLogical(schema, value.binaryValue());
} catch (Exception e) {
throw new DataException("Invalid bytes for Decimal field", e);
}
}
throw new DataException("Invalid type for Decimal, underlying representation should be numeric or bytes but was " + value.getNodeType());
}
});
LOGICAL_CONVERTERS.put(Date.LOGICAL_NAME, new LogicalTypeConverter() {
@Override
public JsonNode toJson(final Schema schema, final Object value, final JsonConverterConfig config) {
if (!(value instanceof java.util.Date))
throw new DataException("Invalid type for Date, expected Date but was " + value.getClass());
return JSON_NODE_FACTORY.numberNode(Date.fromLogical(schema, (java.util.Date) value));
}
@Override
public Object toConnect(final Schema schema, final JsonNode value) {
if (!(value.isInt()))
throw new DataException("Invalid type for Date, underlying representation should be integer but was " + value.getNodeType());
return Date.toLogical(schema, value.intValue());
}
});
LOGICAL_CONVERTERS.put(Time.LOGICAL_NAME, new LogicalTypeConverter() {
@Override
public JsonNode toJson(final Schema schema, final Object value, final JsonConverterConfig config) {
if (!(value instanceof java.util.Date))
throw new DataException("Invalid type for Time, expected Date but was " + value.getClass());
return JSON_NODE_FACTORY.numberNode(Time.fromLogical(schema, (java.util.Date) value));
}
@Override
public Object toConnect(final Schema schema, final JsonNode value) {
if (!(value.isInt()))
throw new DataException("Invalid type for Time, underlying representation should be integer but was " + value.getNodeType());
return Time.toLogical(schema, value.intValue());
}
});
LOGICAL_CONVERTERS.put(Timestamp.LOGICAL_NAME, new LogicalTypeConverter() {
@Override
public JsonNode toJson(final Schema schema, final Object value, final JsonConverterConfig config) {
if (!(value instanceof java.util.Date))
throw new DataException("Invalid type for Timestamp, expected Date but was " + value.getClass());
return JSON_NODE_FACTORY.numberNode(Timestamp.fromLogical(schema, (java.util.Date) value));
}
@Override
public Object toConnect(final Schema schema, final JsonNode value) {
if (!(value.isIntegralNumber()))
throw new DataException("Invalid type for Timestamp, underlying representation should be integral but was " + value.getNodeType());
return Timestamp.toLogical(schema, value.longValue());
}
});
}
private JsonConverterConfig config;
private Cache<Schema, ObjectNode> fromConnectSchemaCache;
private Cache<JsonNode, Schema> toConnectSchemaCache;
private Schema schema = null; // if a schema is provided in config, this schema will be used for all messages for sink connector
private final JsonSerializer serializer;
private final JsonDeserializer deserializer;
public JsonConverter() {
this(true);
}
/**
* Creates a JsonConvert initializing serializer and deserializer.
*
* @param enableBlackbird permits to enable/disable the registration of Jackson Blackbird module.
* <p>
* NOTE: This is visible only for testing
*/
public JsonConverter(boolean enableBlackbird) {
serializer = new JsonSerializer(
Set.of(),
JSON_NODE_FACTORY,
enableBlackbird
);
deserializer = new JsonDeserializer(
Set.of(
// this ensures that the JsonDeserializer maintains full precision on
// floating point numbers that cannot fit into float64
DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS
),
JSON_NODE_FACTORY,
enableBlackbird
);
}
// visible for testing
long sizeOfFromConnectSchemaCache() {
return fromConnectSchemaCache.size();
}
// visible for testing
long sizeOfToConnectSchemaCache() {
return toConnectSchemaCache.size();
}
@Override
public String version() {
return AppInfoParser.getVersion();
}
@Override
public ConfigDef config() {
return JsonConverterConfig.configDef();
}
@Override
public void configure(Map<String, ?> configs) {
config = new JsonConverterConfig(configs);
serializer.configure(configs, config.type() == ConverterType.KEY);
deserializer.configure(configs, config.type() == ConverterType.KEY);
fromConnectSchemaCache = new SynchronizedCache<>(new LRUCache<>(config.schemaCacheSize()));
toConnectSchemaCache = new SynchronizedCache<>(new LRUCache<>(config.schemaCacheSize()));
try {
final byte[] schemaContent = config.schemaContent();
if (schemaContent != null) {
final JsonNode schemaNode = deserializer.deserialize("", schemaContent);
this.schema = asConnectSchema(schemaNode);
}
} catch (SerializationException e) {
throw new DataException("Failed to parse schema in converter config due to serialization error: ", e);
}
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
Map<String, Object> conf = new HashMap<>(configs);
conf.put(StringConverterConfig.TYPE_CONFIG, isKey ? ConverterType.KEY.getName() : ConverterType.VALUE.getName());
configure(conf);
}
@Override
public void close() {
Utils.closeQuietly(this.serializer, "JSON converter serializer");
Utils.closeQuietly(this.deserializer, "JSON converter deserializer");
}
@Override
public byte[] fromConnectHeader(String topic, String headerKey, Schema schema, Object value) {
return fromConnectData(topic, schema, value);
}
@Override
public SchemaAndValue toConnectHeader(String topic, String headerKey, byte[] value) {
return toConnectData(topic, value);
}
@Override
public byte[] fromConnectData(String topic, Schema schema, Object value) {
if (schema == null && value == null) {
return null;
}
JsonNode jsonValue = config.schemasEnabled() ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value);
try {
return serializer.serialize(topic, jsonValue);
} catch (SerializationException e) {
throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e);
}
}
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
JsonNode jsonValue;
// This handles a tombstone message
if (value == null) {
return SchemaAndValue.NULL;
}
try {
jsonValue = deserializer.deserialize(topic, value);
} catch (SerializationException e) {
throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e);
}
if (config.schemasEnabled()) {
if (schema != null) {
return new SchemaAndValue(schema, convertToConnect(schema, jsonValue, config));
} else if (!jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME) || !jsonValue.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME)) {
throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." +
" If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration.");
}
} else {
// The deserialized data should either be an envelope object containing the schema and the payload or the schema
// was stripped during serialization and we need to fill in an all-encompassing schema.
ObjectNode envelope = JSON_NODE_FACTORY.objectNode();
envelope.set(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME, null);
envelope.set(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME, jsonValue);
jsonValue = envelope;
}
Schema schema = asConnectSchema(jsonValue.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
return new SchemaAndValue(
schema,
convertToConnect(schema, jsonValue.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME), config)
);
}
public ObjectNode asJsonSchema(Schema schema) {
if (schema == null)
return null;
ObjectNode cached = fromConnectSchemaCache.get(schema);
if (cached != null)
return cached;
final ObjectNode jsonSchema;
switch (schema.type()) {
case BOOLEAN:
jsonSchema = JsonSchema.BOOLEAN_SCHEMA.deepCopy();
break;
case BYTES:
jsonSchema = JsonSchema.BYTES_SCHEMA.deepCopy();
break;
case FLOAT64:
jsonSchema = JsonSchema.DOUBLE_SCHEMA.deepCopy();
break;
case FLOAT32:
jsonSchema = JsonSchema.FLOAT_SCHEMA.deepCopy();
break;
case INT8:
jsonSchema = JsonSchema.INT8_SCHEMA.deepCopy();
break;
case INT16:
jsonSchema = JsonSchema.INT16_SCHEMA.deepCopy();
break;
case INT32:
jsonSchema = JsonSchema.INT32_SCHEMA.deepCopy();
break;
case INT64:
jsonSchema = JsonSchema.INT64_SCHEMA.deepCopy();
break;
case STRING:
jsonSchema = JsonSchema.STRING_SCHEMA.deepCopy();
break;
case ARRAY:
jsonSchema = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.ARRAY_TYPE_NAME);
jsonSchema.set(JsonSchema.ARRAY_ITEMS_FIELD_NAME, asJsonSchema(schema.valueSchema()));
break;
case MAP:
jsonSchema = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.MAP_TYPE_NAME);
jsonSchema.set(JsonSchema.MAP_KEY_FIELD_NAME, asJsonSchema(schema.keySchema()));
jsonSchema.set(JsonSchema.MAP_VALUE_FIELD_NAME, asJsonSchema(schema.valueSchema()));
break;
case STRUCT:
jsonSchema = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.STRUCT_TYPE_NAME);
ArrayNode fields = JSON_NODE_FACTORY.arrayNode();
for (Field field : schema.fields()) {
ObjectNode fieldJsonSchema = asJsonSchema(field.schema()).deepCopy();
fieldJsonSchema.put(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME, field.name());
fields.add(fieldJsonSchema);
}
jsonSchema.set(JsonSchema.STRUCT_FIELDS_FIELD_NAME, fields);
break;
default:
throw new DataException("Couldn't translate unsupported schema type " + schema + ".");
}
jsonSchema.put(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME, schema.isOptional());
if (schema.name() != null)
jsonSchema.put(JsonSchema.SCHEMA_NAME_FIELD_NAME, schema.name());
if (schema.version() != null)
jsonSchema.put(JsonSchema.SCHEMA_VERSION_FIELD_NAME, schema.version());
if (schema.doc() != null)
jsonSchema.put(JsonSchema.SCHEMA_DOC_FIELD_NAME, schema.doc());
if (schema.parameters() != null) {
ObjectNode jsonSchemaParams = JSON_NODE_FACTORY.objectNode();
for (Map.Entry<String, String> prop : schema.parameters().entrySet())
jsonSchemaParams.put(prop.getKey(), prop.getValue());
jsonSchema.set(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME, jsonSchemaParams);
}
if (schema.defaultValue() != null)
jsonSchema.set(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME, convertToJson(schema, schema.defaultValue()));
fromConnectSchemaCache.put(schema, jsonSchema);
return jsonSchema;
}
public Schema asConnectSchema(JsonNode jsonSchema) {
if (jsonSchema.isNull())
return null;
Schema cached = toConnectSchemaCache.get(jsonSchema);
if (cached != null)
return cached;
JsonNode schemaTypeNode = jsonSchema.get(JsonSchema.SCHEMA_TYPE_FIELD_NAME);
if (schemaTypeNode == null || !schemaTypeNode.isTextual())
throw new DataException("Schema must contain 'type' field");
final SchemaBuilder builder;
switch (schemaTypeNode.textValue()) {
case JsonSchema.BOOLEAN_TYPE_NAME:
builder = SchemaBuilder.bool();
break;
case JsonSchema.INT8_TYPE_NAME:
builder = SchemaBuilder.int8();
break;
case JsonSchema.INT16_TYPE_NAME:
builder = SchemaBuilder.int16();
break;
case JsonSchema.INT32_TYPE_NAME:
builder = SchemaBuilder.int32();
break;
case JsonSchema.INT64_TYPE_NAME:
builder = SchemaBuilder.int64();
break;
case JsonSchema.FLOAT_TYPE_NAME:
builder = SchemaBuilder.float32();
break;
case JsonSchema.DOUBLE_TYPE_NAME:
builder = SchemaBuilder.float64();
break;
case JsonSchema.BYTES_TYPE_NAME:
builder = SchemaBuilder.bytes();
break;
case JsonSchema.STRING_TYPE_NAME:
builder = SchemaBuilder.string();
break;
case JsonSchema.ARRAY_TYPE_NAME:
JsonNode elemSchema = jsonSchema.get(JsonSchema.ARRAY_ITEMS_FIELD_NAME);
if (elemSchema == null || elemSchema.isNull())
throw new DataException("Array schema did not specify the element type");
builder = SchemaBuilder.array(asConnectSchema(elemSchema));
break;
case JsonSchema.MAP_TYPE_NAME:
JsonNode keySchema = jsonSchema.get(JsonSchema.MAP_KEY_FIELD_NAME);
if (keySchema == null)
throw new DataException("Map schema did not specify the key type");
JsonNode valueSchema = jsonSchema.get(JsonSchema.MAP_VALUE_FIELD_NAME);
if (valueSchema == null)
throw new DataException("Map schema did not specify the value type");
builder = SchemaBuilder.map(asConnectSchema(keySchema), asConnectSchema(valueSchema));
break;
case JsonSchema.STRUCT_TYPE_NAME:
builder = SchemaBuilder.struct();
JsonNode fields = jsonSchema.get(JsonSchema.STRUCT_FIELDS_FIELD_NAME);
if (fields == null || !fields.isArray())
throw new DataException("Struct schema's \"fields\" argument is not an array.");
for (JsonNode field : fields) {
JsonNode jsonFieldName = field.get(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME);
if (jsonFieldName == null || !jsonFieldName.isTextual())
throw new DataException("Struct schema's field name not specified properly");
builder.field(jsonFieldName.asText(), asConnectSchema(field));
}
break;
default:
throw new DataException("Unknown schema type: " + schemaTypeNode.textValue());
}
JsonNode schemaOptionalNode = jsonSchema.get(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME);
if (schemaOptionalNode != null && schemaOptionalNode.isBoolean() && schemaOptionalNode.booleanValue())
builder.optional();
else
builder.required();
JsonNode schemaNameNode = jsonSchema.get(JsonSchema.SCHEMA_NAME_FIELD_NAME);
if (schemaNameNode != null && schemaNameNode.isTextual())
builder.name(schemaNameNode.textValue());
JsonNode schemaVersionNode = jsonSchema.get(JsonSchema.SCHEMA_VERSION_FIELD_NAME);
if (schemaVersionNode != null && schemaVersionNode.isIntegralNumber()) {
builder.version(schemaVersionNode.intValue());
}
JsonNode schemaDocNode = jsonSchema.get(JsonSchema.SCHEMA_DOC_FIELD_NAME);
if (schemaDocNode != null && schemaDocNode.isTextual())
builder.doc(schemaDocNode.textValue());
JsonNode schemaParamsNode = jsonSchema.get(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME);
if (schemaParamsNode != null && schemaParamsNode.isObject()) {
for (Map.Entry<String, JsonNode> entry : schemaParamsNode.properties()) {
JsonNode paramValue = entry.getValue();
if (!paramValue.isTextual())
throw new DataException("Schema parameters must have string values.");
builder.parameter(entry.getKey(), paramValue.textValue());
}
}
JsonNode schemaDefaultNode = jsonSchema.get(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME);
if (schemaDefaultNode != null)
builder.defaultValue(convertToConnect(builder, schemaDefaultNode, config));
Schema result = builder.build();
toConnectSchemaCache.put(jsonSchema, result);
return result;
}
/**
* Convert this object, in the {@link org.apache.kafka.connect.data} format, into a JSON object with an envelope
* object containing schema and payload fields.
* @param schema the schema for the data
* @param value the value
* @return JsonNode-encoded version
*/
private JsonNode convertToJsonWithEnvelope(Schema schema, Object value) {
return new JsonSchema.Envelope(asJsonSchema(schema), convertToJson(schema, value)).toJsonNode();
}
private JsonNode convertToJsonWithoutEnvelope(Schema schema, Object value) {
return convertToJson(schema, value);
}
/**
* Convert this object, in the {@link org.apache.kafka.connect.data} format, into a JSON object, returning both the
* schema and the converted object.
*/
private JsonNode convertToJson(Schema schema, Object value) {
if (value == null) {
if (schema == null) // Any schema is valid and we don't have a default, so treat this as an optional schema
return null;
if (schema.defaultValue() != null && config.replaceNullWithDefault())
return convertToJson(schema, schema.defaultValue());
if (schema.isOptional())
return JSON_NODE_FACTORY.nullNode();
throw new DataException("Conversion error: null value for field that is required and has no default value");
}
if (schema != null && schema.name() != null) {
LogicalTypeConverter logicalConverter = LOGICAL_CONVERTERS.get(schema.name());
if (logicalConverter != null)
return logicalConverter.toJson(schema, value, config);
}
try {
final Schema.Type schemaType;
if (schema == null) {
schemaType = ConnectSchema.schemaType(value.getClass());
if (schemaType == null)
throw new DataException("Java class " + value.getClass() + " does not have corresponding schema type.");
} else {
schemaType = schema.type();
}
switch (schemaType) {
case INT8:
return JSON_NODE_FACTORY.numberNode((Byte) value);
case INT16:
return JSON_NODE_FACTORY.numberNode((Short) value);
case INT32:
return JSON_NODE_FACTORY.numberNode((Integer) value);
case INT64:
return JSON_NODE_FACTORY.numberNode((Long) value);
case FLOAT32:
return JSON_NODE_FACTORY.numberNode((Float) value);
case FLOAT64:
return JSON_NODE_FACTORY.numberNode((Double) value);
case BOOLEAN:
return JSON_NODE_FACTORY.booleanNode((Boolean) value);
case STRING:
CharSequence charSeq = (CharSequence) value;
return JSON_NODE_FACTORY.textNode(charSeq.toString());
case BYTES:
if (value instanceof byte[])
return JSON_NODE_FACTORY.binaryNode((byte[]) value);
else if (value instanceof ByteBuffer)
return JSON_NODE_FACTORY.binaryNode(((ByteBuffer) value).array());
else
throw new DataException("Invalid type for bytes type: " + value.getClass());
case ARRAY: {
Collection<?> collection = (Collection<?>) value;
ArrayNode list = JSON_NODE_FACTORY.arrayNode();
for (Object elem : collection) {
Schema valueSchema = schema == null ? null : schema.valueSchema();
JsonNode fieldValue = convertToJson(valueSchema, elem);
list.add(fieldValue);
}
return list;
}
case MAP: {
Map<?, ?> map = (Map<?, ?>) value;
// If true, using string keys and JSON object; if false, using non-string keys and Array-encoding
boolean objectMode;
if (schema == null) {
objectMode = true;
for (Map.Entry<?, ?> entry : map.entrySet()) {
if (!(entry.getKey() instanceof String)) {
objectMode = false;
break;
}
}
} else {
objectMode = schema.keySchema().type() == Schema.Type.STRING;
}
ObjectNode obj = null;
ArrayNode list = null;
if (objectMode)
obj = JSON_NODE_FACTORY.objectNode();
else
list = JSON_NODE_FACTORY.arrayNode();
for (Map.Entry<?, ?> entry : map.entrySet()) {
Schema keySchema = schema == null ? null : schema.keySchema();
Schema valueSchema = schema == null ? null : schema.valueSchema();
JsonNode mapKey = convertToJson(keySchema, entry.getKey());
JsonNode mapValue = convertToJson(valueSchema, entry.getValue());
if (objectMode)
obj.set(mapKey.asText(), mapValue);
else
list.add(JSON_NODE_FACTORY.arrayNode().add(mapKey).add(mapValue));
}
return objectMode ? obj : list;
}
case STRUCT: {
Struct struct = (Struct) value;
if (!struct.schema().equals(schema))
throw new DataException("Mismatching schema.");
ObjectNode obj = JSON_NODE_FACTORY.objectNode();
for (Field field : schema.fields()) {
obj.set(field.name(), convertToJson(field.schema(), struct.getWithoutDefault(field.name())));
}
return obj;
}
}
throw new DataException("Couldn't convert " + value + " to JSON.");
} catch (ClassCastException e) {
String schemaTypeStr = (schema != null) ? schema.type().toString() : "unknown schema";
throw new DataException("Invalid type for " + schemaTypeStr + ": " + value.getClass());
}
}
private static Object convertToConnect(Schema schema, JsonNode jsonValue, JsonConverterConfig config) {
final Schema.Type schemaType;
if (schema != null) {
schemaType = schema.type();
if (jsonValue == null || jsonValue.isNull()) {
if (schema.defaultValue() != null && config.replaceNullWithDefault())
return schema.defaultValue(); // any logical type conversions should already have been applied
if (schema.isOptional())
return null;
throw new DataException("Invalid null value for required " + schemaType + " field");
}
} else {
switch (jsonValue.getNodeType()) {
case NULL:
case MISSING:
// Special case. With no schema
return null;
case BOOLEAN:
schemaType = Schema.Type.BOOLEAN;
break;
case NUMBER:
if (jsonValue.isIntegralNumber())
schemaType = Schema.Type.INT64;
else
schemaType = Schema.Type.FLOAT64;
break;
case ARRAY:
schemaType = Schema.Type.ARRAY;
break;
case OBJECT:
schemaType = Schema.Type.MAP;
break;
case STRING:
schemaType = Schema.Type.STRING;
break;
case BINARY:
case POJO:
default:
schemaType = null;
break;
}
}
final JsonToConnectTypeConverter typeConverter = TO_CONNECT_CONVERTERS.get(schemaType);
if (typeConverter == null)
throw new DataException("Unknown schema type: " + schemaType);
if (schema != null && schema.name() != null) {
LogicalTypeConverter logicalConverter = LOGICAL_CONVERTERS.get(schema.name());
if (logicalConverter != null)
return logicalConverter.toConnect(schema, jsonValue);
}
return typeConverter.convert(schema, jsonValue, config);
}
private interface JsonToConnectTypeConverter {
Object convert(Schema schema, JsonNode value, JsonConverterConfig config);
}
private interface LogicalTypeConverter {
JsonNode toJson(Schema schema, Object value, JsonConverterConfig config);
Object toConnect(Schema schema, JsonNode value);
}
}
|
google/closure-compiler | 36,044 | src/com/google/javascript/refactoring/SuggestedFix.java | /*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.refactoring;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Streams.stream;
import static java.lang.Math.min;
import static java.util.Objects.requireNonNull;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.SetMultimap;
import com.google.errorprone.annotations.InlineMe;
import com.google.javascript.jscomp.AbstractCompiler;
import com.google.javascript.jscomp.CodePrinter;
import com.google.javascript.jscomp.CompilerOptions;
import com.google.javascript.jscomp.NodeUtil;
import com.google.javascript.jscomp.parsing.JsDocInfoParser;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.NonJSDocComment;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.JSType;
import java.util.Collection;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jspecify.annotations.Nullable;
/**
* Object representing the fixes to apply to the source code to create the
* refactoring CL. To create a class, use the {@link Builder} class and helper
* functions.
*/
public final class SuggestedFix {
  /** The kind of import a fix may need to add: a goog.require or a goog.requireType. */
  static enum ImportType {
    REQUIRE,
    REQUIRE_TYPE;
  }
  // Information about the original compiler node that caused this fix to be built.
  private final MatchedNodeInfo matchedNodeInfo;
  // Multimap of filename to a modification to that file.
  private final SetMultimap<String, CodeReplacement> replacements;
  // An optional description of the fix, to distinguish between the various possible fixes
  // for errors that have multiple fixes.
  private final @Nullable String description;
  // Alternative fixes for the same problem. The fix itself is always the first entry in this list.
  // If you cannot ask the developer which fix is appropriate, apply the first fix instead of
  // any alternatives.
  private final ImmutableList<SuggestedFix> alternatives;
  /**
   * Private constructor; instances are built via {@link Builder}.
   *
   * <p>The new fix prepends itself to the supplied alternatives, so {@code alternatives} always
   * has this fix as its first entry.
   */
  private SuggestedFix(
      MatchedNodeInfo matchedNodeInfo,
      SetMultimap<String, CodeReplacement> replacements,
      @Nullable String description,
      ImmutableList<SuggestedFix> alternatives) {
    this.matchedNodeInfo = matchedNodeInfo;
    this.replacements = replacements;
    this.description = description;
    this.alternatives =
        ImmutableList.<SuggestedFix>builder().add(this).addAll(alternatives).build();
  }
  /**
   * Returns information about the original JS Compiler Node that caused this SuggestedFix to be
   * constructed.
   */
  public MatchedNodeInfo getMatchedNodeInfo() {
    return matchedNodeInfo;
  }
  /**
   * Returns a multimap from filename to all the replacements that should be
   * applied for this given fix.
   */
  public SetMultimap<String, CodeReplacement> getReplacements() {
    return replacements;
  }
  /** Returns the optional human-readable description of this fix, or null if none was set. */
  public @Nullable String getDescription() {
    return description;
  }
  /** Get all possible fixes for this problem, including this fix (always the first entry). */
  public ImmutableList<SuggestedFix> getAlternatives() {
    return alternatives;
  }
  /** Get all alternative fixes, excluding this fix (which is always entry 0 of the list). */
  public ImmutableList<SuggestedFix> getNonDefaultAlternatives() {
    return alternatives.subList(1, alternatives.size());
  }
  /** Returns true if this fix contains no code replacements at all. */
  boolean isNoOp() {
    return replacements.isEmpty();
  }
@Override
public String toString() {
if (this.isNoOp()) {
return "<no-op SuggestedFix>";
}
StringBuilder sb = new StringBuilder();
for (Map.Entry<String, Collection<CodeReplacement>> entry : replacements.asMap().entrySet()) {
sb.append("Replacements for file: ").append(entry.getKey()).append("\n");
Joiner.on("\n\n").appendTo(sb, entry.getValue());
}
return sb.toString();
}
  /**
   * Builder class for {@link SuggestedFix} that contains helper functions to manipulate JS nodes.
   */
  public static final class Builder {
    // Node that triggered the fix; set via attachMatchedNodeInfo().
    private @Nullable MatchedNodeInfo matchedNodeInfo = null;
    // Accumulates per-file text edits, keyed by source file name.
    private final ImmutableSetMultimap.Builder<String, CodeReplacement> replacements =
        ImmutableSetMultimap.builder();
    // Alternative fixes registered for the same problem.
    private final ImmutableList.Builder<SuggestedFix> alternatives = ImmutableList.builder();
    // Optional human-readable description of the fix being built.
    private @Nullable String description = null;
    /**
     * Sets the node on this SuggestedFix that caused this SuggestedFix to be built in the first
     * place.
     *
     * @param node the compiler node that matched the refactoring pattern
     * @param compiler the compiler instance, used to determine whether the node is in a
     *     Closurized file
     * @return this builder, for chaining
     */
    public Builder attachMatchedNodeInfo(Node node, AbstractCompiler compiler) {
      matchedNodeInfo =
          MatchedNodeInfo.create(
              node, RefactoringUtils.isInClosurizedFile(node, new NodeMetadata(compiler)));
      return this;
    }
    /**
     * Registers an alternative fix for the same problem.
     *
     * @throws IllegalStateException if {@code alternative} carries alternatives of its own, since
     *     nested alternatives are not supported
     */
    public Builder addAlternative(SuggestedFix alternative) {
      checkState(
          alternative.getNonDefaultAlternatives().isEmpty(),
          "Alternative SuggestedFix must have no alternatives of their own.");
      alternatives.add(alternative);
      return this;
    }
/**
* Replaces text starting at the given node position.
*/
Builder replaceText(Node node, int length, String newContent) {
int startPosition = node.getSourceOffset();
replacements.put(
node.getSourceFileName(), CodeReplacement.create(startPosition, length, newContent));
return this;
}
    /**
     * Inserts a new node as the first child of the provided node.
     *
     * <p>Only BLOCK nodes are supported; the content is inserted on a new line just past the
     * block's opening brace.
     */
    public Builder addChildToFront(Node parentNode, String content) {
      checkState(
          parentNode.isBlock(), "addChildToFront is only supported for BLOCK statements.");
      // +1 skips past the opening '{' of the block.
      int startPosition = parentNode.getSourceOffset() + 1;
      replacements.put(
          parentNode.getSourceFileName(), CodeReplacement.create(startPosition, 0, "\n" + content));
      return this;
    }
/**
* Inserts the text after the given node
*/
public Builder insertAfter(Node node, String text) {
int position = node.getSourceOffset() + node.getLength();
replacements.put(node.getSourceFileName(), CodeReplacement.create(position, 0, text));
return this;
}
    /**
     * Inserts a new node before the provided node.
     *
     * <p>The node {@code n} is rendered to source text with {@code compiler} before insertion.
     */
    public Builder insertBefore(Node nodeToInsertBefore, Node n, AbstractCompiler compiler) {
      return insertBefore(nodeToInsertBefore, n, compiler, "");
    }
    /**
     * Renders {@code n} to source text and inserts it before {@code nodeToInsertBefore}, tagging
     * the replacement with {@code sortKey} to order multiple insertions at the same position.
     */
    Builder insertBefore(
        Node nodeToInsertBefore, Node n, AbstractCompiler compiler, String sortKey) {
      return insertBefore(nodeToInsertBefore, generateCode(compiler, n), sortKey);
    }
    /**
     * Inserts a string before the provided node. This is useful for inserting
     * comments into a file since the JS Compiler doesn't currently support
     * printing comments.
     */
    public Builder insertBefore(Node nodeToInsertBefore, String content) {
      return insertBefore(nodeToInsertBefore, content, "");
    }
private Builder insertBefore(Node nodeToInsertBefore, String content, String sortKey) {
int startPosition = getStartPositionForNodeConsideringComments(nodeToInsertBefore);
Preconditions.checkNotNull(nodeToInsertBefore.getSourceFileName(),
"No source file name for node: %s", nodeToInsertBefore);
replacements.put(
nodeToInsertBefore.getSourceFileName(),
CodeReplacement.create(startPosition, 0, content, sortKey));
return this;
}
/**
* Deletes a node and its contents from the source file. If the node is a child of a
* block or top level statement, this will also delete the whitespace before the node.
*/
public Builder delete(Node n) {
return delete(n, true);
}
    /**
     * Deletes a node and its contents from the source file.
     *
     * @param n the node to remove
     * @param deleteWhitespaceBefore if true and the node is a direct child of a SCRIPT or BLOCK,
     *     also removes the whitespace between the previous sibling and this node
     */
    private Builder delete(Node n, boolean deleteWhitespaceBefore) {
      // Start at the node's attached comments (JSDoc and/or non-JSDoc) so they are removed too.
      int startPosition = getStartPositionForNodeConsideringComments(n);
      int startOffsetWithoutComments = n.getSourceOffset();
      int length = (startOffsetWithoutComments - startPosition) + n.getLength();
      // If the next sibling carries no comments of its own, extend the deletion up to its start so
      // the separator text between the two nodes is removed as well.
      if (n.getNext() != null
          && NodeUtil.getBestJSDocInfo(n.getNext()) == null
          && n.getNext().getNonJSDocComment() == null) {
        length = n.getNext().getSourceOffset() - startPosition;
      }
      // Variable declarations and string keys require special handling since the node doesn't
      // contain enough if it has a child. The NAME node in a var/let/const declaration doesn't
      // include its child in its length, and the code needs to know how to delete the commas.
      // The same is true for string keys in object literals and object destructuring patterns.
      // TODO(mknichel): Move this logic and the start position logic to a helper function
      // so that it can be reused in other methods.
      if ((n.isName() && NodeUtil.isNameDeclaration(n.getParent())) || n.isStringKey()) {
        if (n.getNext() != null) {
          // Delete up to the start of the next declaration, covering the comma and whitespace.
          length = getStartPositionForNodeConsideringComments(n.getNext()) - startPosition;
        } else if (n.hasChildren()) {
          // A NAME with an initializer: include the initializer in the deleted range.
          Node child = n.getFirstChild();
          length = (child.getSourceOffset() + child.getLength()) - startPosition;
        }
        // Deleting the last of several declarations: pull the start back to the end of the
        // previous sibling so the trailing comma and whitespace are removed as well.
        if (n.getParent().getLastChild() == n && n != n.getParent().getFirstChild()) {
          Node previousSibling = n.getPrevious();
          if (previousSibling.hasChildren()) {
            Node child = previousSibling.getFirstChild();
            int startPositionDiff = startPosition - (child.getSourceOffset() + child.getLength());
            startPosition -= startPositionDiff;
            length += startPositionDiff;
          } else {
            int startPositionDiff =
                startPosition - (previousSibling.getSourceOffset() + previousSibling.getLength());
            startPosition -= startPositionDiff;
            length += startPositionDiff;
          }
        }
      }
      Node parent = n.getParent();
      // Optionally absorb the whitespace between the previous statement and this one.
      if (deleteWhitespaceBefore
          && parent != null
          && (parent.isScript() || parent.isBlock())) {
        Node previousSibling = n.getPrevious();
        if (previousSibling != null) {
          int previousSiblingEndPosition =
              previousSibling.getSourceOffset() + previousSibling.getLength();
          length += (startPosition - previousSiblingEndPosition);
          startPosition = previousSiblingEndPosition;
        }
      }
      replacements.put(n.getSourceFileName(), CodeReplacement.create(startPosition, length, ""));
      return this;
    }
    /**
     * Deletes a node and its contents from the source file, but keeps any whitespace that precedes
     * the node (unlike {@code delete}, which removes it for block/script children).
     */
    public Builder deleteWithoutRemovingWhitespaceBefore(Node n) {
      return delete(n, false);
    }
/** Deletes a node without touching any surrounding whitespace. */
public Builder deleteWithoutRemovingWhitespace(Node n) {
replacements.put(
n.getSourceFileName(), CodeReplacement.create(n.getSourceOffset(), n.getLength(), ""));
return this;
}
/**
* Renames a given node to the provided name.
* @param n The node to rename.
* @param name The new name for the node.
*/
public Builder rename(Node n, String name) {
return rename(n, name, false);
}
    /**
     * Renames a given node to the provided name.
     *
     * @param n The node to rename.
     * @param name The new name for the node.
     * @param replaceNameSubtree True to replace the entire name subtree below the node. The default
     *     is to replace just the last property in the node with the new name. For instance, if
     *     {@code replaceNameSubtree} is false, then {@code this.foo()} will be renamed to {@code
     *     this.bar()}. However, if it is true, it will be renamed to {@code bar()}.
     * @throws UnsupportedOperationException for node types that cannot be renamed
     */
    public Builder rename(Node n, String name, boolean replaceNameSubtree) {
      final Node range;
      switch (n.getToken()) {
        case CALL:
        case TAGGED_TEMPLATELIT:
          // For calls, rename the callee rather than the call expression itself.
          return this.rename(n.getFirstChild(), name, replaceNameSubtree);
        case GETPROP:
          range = replaceNameSubtree ? subtreeRangeOfIdentifier(n) : n;
          break;
        case STRINGLIT:
          // Only string literals acting as property names (the right side of a GETPROP) qualify.
          checkState(n.getParent().isGetProp(), n);
          // Fall through
        case STRING_KEY:
        case NAME:
          range = n;
          break;
        default:
          throw new UnsupportedOperationException(
              "Rename is not implemented for this node type: " + n);
      }
      replacements.put(
          range.getSourceFileName(),
          CodeReplacement.create(range.getSourceOffset(), range.getLength(), name));
      return this;
    }
    /**
     * Replaces the source span from the start of {@code first} through the end of {@code last}
     * (which must share a parent) with the given content.
     */
    public Builder replaceRange(Node first, Node last, String newContent) {
      checkState(first.getParent() == last.getParent());
      // Normally include comments attached to the first node in the replaced span.
      int start = getStartPositionForNodeConsideringComments(first);
      if (start == 0) {
        // if there are file-level comments at the top of the file, we do not wish to remove them
        start = first.getSourceOffset();
      }
      int end = last.getSourceOffset() + last.getLength();
      int length = end - start;
      replacements.put(
          first.getSourceFileName(), CodeReplacement.create(start, length, newContent));
      return this;
    }
    /**
     * Replaces the provided node with {@code newNode} in the source file.
     *
     * @param original the node to replace
     * @param newNode the replacement AST, printed via {@link #generateCode}
     * @param compiler used to print the replacement node
     */
    public Builder replace(Node original, Node newNode, AbstractCompiler compiler) {
      Node parent = original.getParent();
      // EXPR_RESULT nodes will contain the trailing semicolons, but the child node
      // will not. Replace the EXPR_RESULT node to ensure that the semicolons are
      // correct in the final output.
      if (parent != null && parent.isExprResult()) {
        original = parent;
      }
      // TODO(mknichel): Move this logic to CodePrinter.
      String newCode = generateCode(compiler, newNode);
      // The generated code may contain a trailing newline but that is never wanted.
      if (newCode.endsWith("\n")) {
        newCode = newCode.substring(0, newCode.length() - 1);
      }
      // Most replacements don't need the semicolon in the new generated code - however, some
      // statements that are blocks or expressions will need the semicolon.
      boolean needsSemicolon =
          parent != null
              && (parent.isExprResult()
                  || parent.isBlock()
                  || parent.isScript()
                  || parent.isModuleBody());
      if (newCode.endsWith(";") && !needsSemicolon) {
        newCode = newCode.substring(0, newCode.length() - 1);
      }
      // If the replacement has lower precedence then we may need to add parentheses.
      if (parent != null && IR.mayBeExpression(parent)) {
        Node replacement = newNode;
        // Unwrap single-statement containers to reach the underlying expression.
        while ((replacement.isBlock() || replacement.isScript() || replacement.isModuleBody())
            && replacement.hasOneChild()) {
          replacement = replacement.getOnlyChild();
        }
        if (replacement.isExprResult()) {
          replacement = replacement.getOnlyChild();
        }
        if (IR.mayBeExpression(replacement)) {
          int outer = NodeUtil.precedence(parent.getToken());
          int inner = NodeUtil.precedence(original.getToken());
          int newInner = NodeUtil.precedence(replacement.getToken());
          // Parenthesize only when the new expression binds more loosely than the old one did
          // relative to the surrounding context.
          if (newInner < NodeUtil.precedence(Token.CALL) && newInner <= outer && inner >= outer) {
            newCode = "(" + newCode + ")";
          }
        }
      }
      Node range = original;
      // For qualified names, replace the whole a.b.c subtree, not just the last property.
      if (original.isGetProp()) {
        range = subtreeRangeOfIdentifier(original);
      }
      replacements.put(
          range.getSourceFileName(),
          CodeReplacement.create(range.getSourceOffset(), range.getLength(), newCode));
      return this;
    }
/**
* Adds a cast of the given type to the provided node.
*/
public Builder addCast(Node n, AbstractCompiler compiler, String type) {
// TODO(mknichel): Figure out the best way to output the typecast.
replacements.put(
n.getSourceFileName(),
CodeReplacement.create(
n.getSourceOffset(),
n.getLength(),
"/** @type {" + type + "} */ (" + generateCode(compiler, n) + ")"));
return this;
}
    /**
     * Removes a JSDoc type cast from the given node, deleting both the cast comment and the
     * wrapping parentheses.
     *
     * <p>NOTE(review): {@code jsDoc} is dereferenced without a null check — this assumes every
     * CAST node carries JSDoc with an original comment position; confirm.
     */
    public Builder removeCast(Node n, AbstractCompiler compiler) {
      checkArgument(n.isCast());
      JSDocInfo jsDoc = n.getJSDocInfo();
      // Remove everything from the start of the comment up to the casted expression
      // (the comment itself plus the opening parenthesis).
      replacements.put(
          n.getSourceFileName(),
          CodeReplacement.create(
              jsDoc.getOriginalCommentPosition(),
              n.getFirstChild().getSourceOffset() - jsDoc.getOriginalCommentPosition(),
              ""));
      // Remove the closing parenthesis at the end of the cast.
      replacements.put(
          n.getSourceFileName(),
          CodeReplacement.create(n.getSourceOffset() + n.getLength() - 1, /* length= */ 1, ""));
      return this;
    }
/**
* Adds or replaces the JS Doc for the given node.
*/
public Builder addOrReplaceJsDoc(Node n, String newJsDoc) {
int offset = n.getSourceOffset();
int length = 0;
if (n.isGetProp()) {
offset = subtreeRangeOfIdentifier(n).getSourceOffset();
}
JSDocInfo jsDoc = NodeUtil.getBestJSDocInfo(n);
if (jsDoc != null) {
offset = jsDoc.getOriginalCommentPosition();
length = jsDoc.getOriginalCommentString().length();
}
replacements.put(n.getSourceFileName(), CodeReplacement.create(offset, length, newJsDoc));
return this;
}
    /**
     * Changes the JSDoc type declared on the given node to {@code type}.
     *
     * @throws RuntimeException if the compiler's type registry does not recognize the type string
     */
    public Builder changeJsDocType(Node n, AbstractCompiler compiler, String type) {
      Node typeNode = JsDocInfoParser.parseTypeString(type);
      Preconditions.checkNotNull(typeNode, "Invalid type: %s", type);
      JSTypeExpression typeExpr = new JSTypeExpression(typeNode, "jsflume");
      // Validate the type against the registry before touching the source text.
      JSType newJsType = typeExpr.evaluate(null, compiler.getTypeRegistry());
      if (newJsType == null) {
        throw new RuntimeException("JS Compiler does not recognize type: " + type);
      }
      // TODO(mknichel): Use the JSDocInfoParser to find the end of the type declaration. This
      // would also handle multiple lines, and record types (which contain '{')
      // Only "@type" allows type names without "{}"
      replaceTypePattern(n, type, Pattern.compile(
          "@(type) *\\{?[^@\\s}]+\\}?"));
      // Text following other annotations may be a comment, not a type.
      replaceTypePattern(n, type, Pattern.compile(
          "@(export|package|private|protected|public|const|return) *\\{[^}]+\\}"));
      return this;
    }
    /**
     * Rewrites every match of {@code pattern} within the node's JSDoc comment to declare the new
     * {@code type}. The pattern supplied here should have one matching group — the annotation name
     * — and the entire pattern should match the annotation plus the type expression to be
     * replaced.
     */
    private void replaceTypePattern(Node n, String type, Pattern pattern) {
      JSDocInfo info = NodeUtil.getBestJSDocInfo(n);
      Preconditions.checkNotNull(info, "Node %s does not have JS Doc associated with it.", n);
      String originalComment = info.getOriginalCommentString();
      int originalPosition = info.getOriginalCommentPosition();
      if (originalComment != null) {
        Matcher m = pattern.matcher(originalComment);
        while (m.find()) {
          // Replace the whole "@annotation {oldType}" span, keeping the annotation name from
          // capture group 1 and always emitting braces around the new type.
          replacements.put(
              n.getSourceFileName(),
              CodeReplacement.create(
                  originalPosition + m.start(),
                  m.end() - m.start(),
                  "@" + m.group(1) + " {" + type + "}"));
        }
      }
    }
    /**
     * Inserts arguments into an existing function call.
     *
     * @param n the CALL node
     * @param position zero-based index at which to insert; must not exceed the current argument
     *     count
     * @param args source text of the arguments to insert
     */
    public Builder insertArguments(Node n, int position, String... args) {
      checkArgument(n.isCall(), "insertArguments is only applicable to function call nodes.");
      int startPosition;
      // The first child of a CALL is the callee; arguments start at the second child.
      Node argument = n.getSecondChild();
      int i = 0;
      while (argument != null && i < position) {
        argument = argument.getNext();
        i++;
      }
      if (argument == null) {
        checkArgument(
            position == i, "The specified position must be less than the number of arguments.");
        // Appending at the end: insert just before the closing parenthesis.
        startPosition = n.getSourceOffset() + n.getLength() - 1;
      } else {
        startPosition = getStartPositionForNodeConsideringComments(argument);
      }
      String newContent = Joiner.on(", ").join(args);
      // Separator placement: trailing comma when inserting before an existing argument,
      // leading comma when appending after existing arguments.
      if (argument != null) {
        newContent += ", ";
      } else if (i > 0) {
        newContent = ", " + newContent;
      }
      replacements.put(n.getSourceFileName(), CodeReplacement.create(startPosition, 0, newContent));
      return this;
    }
    /**
     * Deletes an argument from an existing function call, including any JS doc that precedes it.
     * WARNING: If jsdoc erroneously follows the argument, it will not be removed as the parser
     * considers the comment to belong to the next argument.
     *
     * @param n the CALL or NEW node
     * @param position zero-based index of the argument to delete
     */
    public Builder deleteArgument(Node n, int position) {
      checkArgument(
          n.isCall() || n.isNew(), "deleteArgument is only applicable to function call nodes.");
      // A CALL node's first child is the name of the function being called, and subsequent children
      // are the arguments being passed to that function.
      int numArguments = n.getChildCount() - 1;
      checkState(
          numArguments > 0, "deleteArgument() cannot be used on a function call with no arguments");
      checkArgument(
          position >= 0 && position < numArguments,
          "The specified position must be less than the number of arguments.");
      Node argument = n.getSecondChild();
      // Points at the first position in the code we will remove.
      int startOfArgumentToRemove = -1;
      // Points one past the last position in the code we will remove.
      int endOfArgumentToRemove = -1;
      int i = 0;
      while (argument != null) {
        // If we are removing the first argument, we remove from the start of it (including any
        // jsdoc). Otherwise, we remove from the end of the previous argument (to remove the comma
        // and any whitespace).
        // If we are removing the first argument and it's not the only argument, we remove to the
        // beginning of the next argument (to remove the comma and any whitespace). Otherwise we
        // remove to the end of the argument.
        if (i < position) {
          startOfArgumentToRemove = argument.getSourceOffset() + argument.getLength();
        } else if (i == position) {
          if (position == 0) {
            startOfArgumentToRemove = getStartPositionForNodeConsideringComments(argument);
          }
          endOfArgumentToRemove = argument.getSourceOffset() + argument.getLength();
        } else if (i > position) {
          if (position == 0) {
            endOfArgumentToRemove = argument.getSourceOffset();
          }
          // We have all the information we need to remove the argument, break early.
          break;
        }
        argument = argument.getNext();
        i++;
      }
      // Remove the argument by replacing it with an empty string.
      int lengthOfArgumentToRemove = endOfArgumentToRemove - startOfArgumentToRemove;
      replacements.put(
          n.getSourceFileName(),
          CodeReplacement.create(startOfArgumentToRemove, lengthOfArgumentToRemove, ""));
      return this;
    }
private static Node createImportNode(
ImportType importType, @Nullable String alias, String namespace) {
final String requireFlavor =
switch (importType) {
case REQUIRE -> "require";
case REQUIRE_TYPE -> "requireType";
};
Node callNode = IR.call(IR.getprop(IR.name("goog"), requireFlavor), IR.string(namespace));
if (alias != null) {
return IR.constNode(IR.name(alias), callNode);
} else {
return IR.exprResult(callNode);
}
}
public Builder addGoogRequire(Match m, String namespace, ScriptMetadata scriptMetadata) {
return addImport(m, namespace, ImportType.REQUIRE, scriptMetadata);
}
public Builder addGoogRequireType(Match m, String namespace, ScriptMetadata scriptMetadata) {
return addImport(m, namespace, ImportType.REQUIRE_TYPE, scriptMetadata);
}
    /**
     * Adds a goog.require/requireType for the given namespace if it does not already exist,
     * keeping the import block alphabetically sorted.
     */
    public Builder addImport(
        Match m, String namespace, ImportType importType, ScriptMetadata scriptMetadata) {
      // Step 1: pick an alias for the require, if this file style uses aliased requires.
      final String alias;
      if (scriptMetadata.supportsRequireAliases()) {
        String existingAlias = scriptMetadata.getAlias(namespace);
        if (existingAlias != null) {
          /*
           * Each fix must be independently valid, so go through the steps of adding a require even
           * if one may already exist or have been added by another fix.
           */
          alias = existingAlias;
        } else if (namespace.indexOf('.') == -1) {
          /*
           * For unqualified names, the existing references will still be valid so long as we keep
           * the same name for the alias.
           */
          alias = namespace;
        } else {
          // Pick the first generated alias candidate not already used in the file.
          alias =
              stream(RequireAliasGenerator.over(namespace))
                  .filter((a) -> !scriptMetadata.usesName(a))
                  .findFirst()
                  .orElseThrow(AssertionError::new);
        }
        scriptMetadata.addAlias(namespace, alias);
      } else {
        alias = null;
      }
      // Step 2: if a require for this namespace already exists, at most upgrade it to an aliased
      // form and stop.
      NodeMetadata metadata = m.getMetadata();
      Node existingNode = findGoogRequireNode(m.getNode(), metadata, namespace);
      if (existingNode != null) {
        // TODO(b/139953612): Destructured goog.requires are not supported.
        // Add an alias to a naked require if allowed in this file.
        if (existingNode.isExprResult() && alias != null) {
          Node newNode;
          // Replace goog.forwardDeclare with the appropriate alternative
          if (NodeUtil.isCallTo(existingNode.getFirstChild(), "goog.forwardDeclare")) {
            newNode = createImportNode(importType, alias, namespace);
          } else {
            newNode = IR.constNode(IR.name(alias), existingNode.getFirstChild().cloneTree());
          }
          replace(existingNode, newNode, m.getMetadata().getCompiler());
          scriptMetadata.addAlias(namespace, alias);
        }
        return this;
      }
      // Find the right goog.require node to insert this after.
      Node script = scriptMetadata.getScript();
      if (script.getFirstChild().isModuleBody()) {
        script = script.getFirstChild();
      }
      Node lastModuleOrProvideNode = null;
      Node lastGoogRequireNode = null;
      Node nodeToInsertBefore = null;
      Node child = script.getFirstChild();
      while (child != null) {
        if (Matchers.googModule().matches(child, metadata)) {
          lastModuleOrProvideNode = child;
        }
        if (NodeUtil.isExprCall(child)) {
          // TODO(mknichel): Replace this logic with a function argument
          // Matcher when it exists.
          Node grandchild = child.getFirstChild();
          if (Matchers.googModuleOrProvide().matches(grandchild, metadata)) {
            lastModuleOrProvideNode = grandchild;
          } else if (Matchers.googRequirelike().matches(grandchild, metadata)) {
            lastGoogRequireNode = grandchild;
            // Standalone requires are sorted by namespace.
            if (grandchild.getLastChild().isStringLit()
                && namespace.compareTo(grandchild.getLastChild().getString()) < 0) {
              nodeToInsertBefore = child;
              break;
            }
          }
        } else if (NodeUtil.isNameDeclaration(child)
            && child.getFirstFirstChild() != null
            && Matchers.googRequirelike().matches(child.getFirstFirstChild(), metadata)) {
          lastGoogRequireNode = child.getFirstFirstChild();
          String requireName = child.getFirstChild().getString();
          String originalName = child.getFirstChild().getOriginalName();
          if (originalName != null) {
            requireName = originalName;
          }
          // Aliased requires are sorted by alias name.
          // NOTE(review): `alias` can be null when the file does not support require aliases but
          // still contains an aliased require — confirm this combination cannot occur, otherwise
          // this dereference throws.
          if (alias.compareTo(requireName) < 0) {
            nodeToInsertBefore = child;
            break;
          }
        }
        child = child.getNext();
      }
      Node newImportNode = createImportNode(importType, alias, namespace);
      if (nodeToInsertBefore == null) {
        // The file has goog.provide or goog.require nodes but they come before
        // the new goog.require node alphabetically.
        if (lastModuleOrProvideNode != null || lastGoogRequireNode != null) {
          Node nodeToInsertAfter =
              lastGoogRequireNode != null ? lastGoogRequireNode : lastModuleOrProvideNode;
          // +2 skips the statement's closing ");" so the insert lands after the full statement.
          int startPosition =
              nodeToInsertAfter.getSourceOffset() + nodeToInsertAfter.getLength() + 2;
          replacements.put(
              nodeToInsertAfter.getSourceFileName(),
              CodeReplacement.create(
                  startPosition,
                  0,
                  generateCode(m.getMetadata().getCompiler(), newImportNode),
                  namespace));
          return this;
        } else {
          // The file has no goog.provide or goog.require nodes.
          if (script.hasChildren()) {
            nodeToInsertBefore = script.getFirstChild();
          } else {
            // Empty file: insert at offset 0.
            replacements.put(
                script.getSourceFileName(),
                CodeReplacement.create(
                    0, 0, generateCode(m.getMetadata().getCompiler(), newImportNode), namespace));
            return this;
          }
        }
      }
      return insertBefore(
          nodeToInsertBefore, newImportNode, m.getMetadata().getCompiler(), namespace);
    }
/**
* Removes a goog.require for the given namespace to the file if it
* already exists.
*/
public Builder removeGoogRequire(Match m, String namespace) {
Node googRequireNode = findGoogRequireNode(m.getNode(), m.getMetadata(), namespace);
if (googRequireNode != null) {
return deleteWithoutRemovingWhitespaceBefore(googRequireNode);
}
return this;
}
    /**
     * Find the goog.require node for the given namespace (or null if there isn't one). If there is
     * more than one:
     *
     * <ul>
     *   <li>If there is at least one standalone goog.require, this will return the first
     *       standalone goog.require.
     *   <li>If not, this will return the first goog.require.
     * </ul>
     */
    private static @Nullable Node findGoogRequireNode(
        Node n, NodeMetadata metadata, String namespace) {
      Node script = metadata.getCompiler().getScriptNode(n.getSourceFileName());
      if (script.getFirstChild().isModuleBody()) {
        script = script.getFirstChild();
      }
      // First pass: standalone requires, e.g. `goog.require('ns');`.
      for (Node child = script.getFirstChild(); child != null; child = child.getNext()) {
        if (NodeUtil.isExprCall(child)
            && Matchers.googRequirelike(namespace).matches(child.getFirstChild(), metadata)) {
          return child;
        }
      }
      // Second pass: aliased requires, e.g. `const x = goog.require('ns');`.
      for (Node child = script.getFirstChild(); child != null; child = child.getNext()) {
        if (NodeUtil.isNameDeclaration(child)
            // TODO(b/139953612): respect destructured goog.requires
            && !child.getFirstChild().isDestructuringLhs()
            && child.getFirstChild().getLastChild() != null
            && Matchers.googRequirelike(namespace)
                .matches(child.getFirstChild().getLastChild(), metadata)) {
          return child;
        }
      }
      return null;
    }
    /**
     * Returns a detached placeholder node whose source range spans an entire qualified-name
     * subtree: from the leftmost descendant's offset through the end of {@code n}. Only GETPROP
     * nodes are supported.
     */
    private static Node subtreeRangeOfIdentifier(Node n) {
      checkState(n.isGetProp(), "Support other identifier nodes");
      Node leftmost = n;
      while (leftmost.hasChildren()) {
        leftmost = leftmost.getFirstChild();
      }
      // Build an EMPTY node that carries only position information, never inserted into the AST.
      Node result = IR.empty();
      result.setStaticSourceFile(n.getStaticSourceFile());
      result.setLinenoCharno(leftmost.getLineno(), leftmost.getCharno());
      result.setLength(n.getLength() + (n.getSourceOffset() - leftmost.getSourceOffset()));
      return result;
    }
    /**
     * Prints the given node as source text with refactoring-friendly printer options.
     *
     * <p>NOTE(review): when {@code node} is a BLOCK, its token is mutated to SCRIPT in place to
     * avoid printing the braces — the caller's node is permanently altered; confirm callers never
     * reuse the node afterwards.
     */
    public String generateCode(AbstractCompiler compiler, Node node) {
      // TODO(mknichel): Fix all the formatting problems with this code.
      // How does this play with goog.scope?
      if (node.isBlock()) {
        // Avoid printing the {}'s
        node.setToken(Token.SCRIPT);
      }
      CompilerOptions compilerOptions = new CompilerOptions();
      compilerOptions.setPreferSingleQuotes(true);
      compilerOptions.setUseOriginalNamesInOutput(true);
      // Keep the author's comments in the refactored output.
      compilerOptions.setPreserveNonJSDocComments(true);
      // We're refactoring existing code, so no need to escape values inside strings.
      compilerOptions.setTrustedStrings(true);
      return new CodePrinter.Builder(node)
          .setCompilerOptions(compilerOptions)
          .setTypeRegistry(compiler.getTypeRegistry())
          .setPrettyPrint(true)
          .setLineBreak(true)
          .setOutputTypes(true)
          .build();
    }
public Builder setDescription(String description) {
this.description = description;
return this;
}
    /** Assembles the collected replacements, description, and alternatives into a SuggestedFix. */
    public SuggestedFix build() {
      return new SuggestedFix(
          matchedNodeInfo, replacements.build(), description, alternatives.build());
    }
}
  /**
   * Information about the node that was matched for the suggested fix. This information can be used
   * later on when processing the SuggestedFix.
   *
   * <p>NOTE: Since this class can be retained for a long time when running refactorings over large
   * blobs of code, it's important that it does not contain any memory intensive objects in order to
   * keep memory to a reasonable amount.
   */
  public record MatchedNodeInfo(
      String sourceFilename, int lineno, int charno, boolean inClosurizedFile) {
    public MatchedNodeInfo {
      requireNonNull(sourceFilename, "sourceFilename");
    }

    /** Legacy JavaBean-style accessor; prefer {@link #sourceFilename()}. */
    @InlineMe(replacement = "this.sourceFilename()")
    public String getSourceFilename() {
      return sourceFilename();
    }

    /** Legacy JavaBean-style accessor; prefer {@link #lineno()}. */
    @InlineMe(replacement = "this.lineno()")
    public int getLineno() {
      return lineno();
    }

    /** Legacy JavaBean-style accessor; prefer {@link #charno()}. */
    @InlineMe(replacement = "this.charno()")
    public int getCharno() {
      return charno();
    }

    /** Legacy JavaBean-style accessor; prefer {@link #inClosurizedFile()}. */
    @InlineMe(replacement = "this.inClosurizedFile()")
    public boolean isInClosurizedFile() {
      return inClosurizedFile();
    }

    /** Captures only lightweight position data from the node, per the memory note above. */
    static MatchedNodeInfo create(Node node, boolean closurized) {
      return new MatchedNodeInfo(
          NodeUtil.getSourceName(node), node.getLineno(), node.getCharno(), closurized);
    }
  }
/**
* Helper function to return the source offset of this node considering that JSDoc comments,
* non-JDDoc comments, or both may or may not be attached.
*/
private static int getStartPositionForNodeConsideringComments(Node node) {
JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(node);
NonJSDocComment associatedNonJSDocComment = node.getNonJSDocComment();
int start = node.getSourceOffset();
if (jsdoc != null) {
start = jsdoc.getOriginalCommentPosition();
}
if (associatedNonJSDocComment != null) {
start = min(start, associatedNonJSDocComment.getStartPosition().getOffset());
}
return start;
}
}
|
googleapis/google-cloud-java | 35,895 | java-translate/proto-google-cloud-translate-v3/src/main/java/com/google/cloud/translate/v3/DocumentTranslation.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/translate/v3/translation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.translate.v3;
/**
*
*
* <pre>
* A translated document message.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.DocumentTranslation}
*/
public final class DocumentTranslation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.translation.v3.DocumentTranslation)
DocumentTranslationOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use DocumentTranslation.newBuilder() to construct; this constructor is only invoked by the
  // generated Builder.
  private DocumentTranslation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: initializes all fields to proto3 defaults (empty list / "").
  private DocumentTranslation() {
    byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class);
    mimeType_ = "";
    detectedLanguageCode_ = "";
  }
  // Invoked by the protobuf runtime to create fresh message instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DocumentTranslation();
  }
  /** Returns the message descriptor declared in the generated proto file class. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.translate.v3.TranslationServiceProto
        .internal_static_google_cloud_translation_v3_DocumentTranslation_descriptor;
  }
  // Wires the generated field accessors to this message class and its Builder for
  // reflection-based access by the protobuf runtime.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.translate.v3.TranslationServiceProto
        .internal_static_google_cloud_translation_v3_DocumentTranslation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.translate.v3.DocumentTranslation.class,
            com.google.cloud.translate.v3.DocumentTranslation.Builder.class);
  }
  public static final int BYTE_STREAM_OUTPUTS_FIELD_NUMBER = 1;

  // Backing storage for the repeated `bytes` field; starts as the shared empty list.
  @SuppressWarnings("serial")
  private com.google.protobuf.Internal.ProtobufList<com.google.protobuf.ByteString>
      byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class);

  /**
   *
   *
   * <pre>
   * The array of translated documents. It is expected to be size 1 for now. We
   * may produce multiple translated documents in the future for other type of
   * file formats.
   * </pre>
   *
   * <code>repeated bytes byte_stream_outputs = 1;</code>
   *
   * @return A list containing the byteStreamOutputs.
   */
  @java.lang.Override
  public java.util.List<com.google.protobuf.ByteString> getByteStreamOutputsList() {
    return byteStreamOutputs_;
  }

  /**
   *
   *
   * <pre>
   * The array of translated documents. It is expected to be size 1 for now. We
   * may produce multiple translated documents in the future for other type of
   * file formats.
   * </pre>
   *
   * <code>repeated bytes byte_stream_outputs = 1;</code>
   *
   * @return The count of byteStreamOutputs.
   */
  public int getByteStreamOutputsCount() {
    return byteStreamOutputs_.size();
  }

  /**
   *
   *
   * <pre>
   * The array of translated documents. It is expected to be size 1 for now. We
   * may produce multiple translated documents in the future for other type of
   * file formats.
   * </pre>
   *
   * <code>repeated bytes byte_stream_outputs = 1;</code>
   *
   * @param index The index of the element to return.
   * @return The byteStreamOutputs at the given index.
   */
  public com.google.protobuf.ByteString getByteStreamOutputs(int index) {
    return byteStreamOutputs_.get(index);
  }
  public static final int MIME_TYPE_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; decoded lazily and cached by the accessors below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object mimeType_ = "";

  /**
   *
   *
   * <pre>
   * The translated document's mime type.
   * </pre>
   *
   * <code>string mime_type = 2;</code>
   *
   * @return The mimeType.
   */
  @java.lang.Override
  public java.lang.String getMimeType() {
    java.lang.Object ref = mimeType_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls avoid re-decoding.
      mimeType_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The translated document's mime type.
   * </pre>
   *
   * <code>string mime_type = 2;</code>
   *
   * @return The bytes for mimeType.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getMimeTypeBytes() {
    java.lang.Object ref = mimeType_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString so subsequent calls avoid re-encoding.
      mimeType_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int DETECTED_LANGUAGE_CODE_FIELD_NUMBER = 3;

  // Holds either a String or a ByteString; decoded lazily and cached by the accessors below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object detectedLanguageCode_ = "";

  /**
   *
   *
   * <pre>
   * The detected language for the input document.
   * If the user did not provide the source language for the input document,
   * this field will have the language code automatically detected. If the
   * source language was passed, auto-detection of the language does not occur
   * and this field is empty.
   * </pre>
   *
   * <code>string detected_language_code = 3;</code>
   *
   * @return The detectedLanguageCode.
   */
  @java.lang.Override
  public java.lang.String getDetectedLanguageCode() {
    java.lang.Object ref = detectedLanguageCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls avoid re-decoding.
      detectedLanguageCode_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The detected language for the input document.
   * If the user did not provide the source language for the input document,
   * this field will have the language code automatically detected. If the
   * source language was passed, auto-detection of the language does not occur
   * and this field is empty.
   * </pre>
   *
   * <code>string detected_language_code = 3;</code>
   *
   * @return The bytes for detectedLanguageCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDetectedLanguageCodeBytes() {
    java.lang.Object ref = detectedLanguageCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString so subsequent calls avoid re-encoding.
      detectedLanguageCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; empty strings are skipped per proto3 rules.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < byteStreamOutputs_.size(); i++) {
      output.writeBytes(1, byteStreamOutputs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, mimeType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(detectedLanguageCode_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, detectedLanguageCode_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < byteStreamOutputs_.size(); i++) {
        dataSize +=
            com.google.protobuf.CodedOutputStream.computeBytesSizeNoTag(byteStreamOutputs_.get(i));
      }
      size += dataSize;
      // Each repeated element carries a 1-byte tag for field number 1.
      size += 1 * getByteStreamOutputsList().size();
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, mimeType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(detectedLanguageCode_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, detectedLanguageCode_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.translate.v3.DocumentTranslation)) {
      // Defer to the superclass for non-DocumentTranslation arguments.
      return super.equals(obj);
    }
    com.google.cloud.translate.v3.DocumentTranslation other =
        (com.google.cloud.translate.v3.DocumentTranslation) obj;

    // Field-by-field comparison, including unknown fields.
    if (!getByteStreamOutputsList().equals(other.getByteStreamOutputsList())) return false;
    if (!getMimeType().equals(other.getMimeType())) return false;
    if (!getDetectedLanguageCode().equals(other.getDetectedLanguageCode())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field only contributes when non-empty, keeping the hash
    // consistent with equals() for default instances.
    if (getByteStreamOutputsCount() > 0) {
      hash = (37 * hash) + BYTE_STREAM_OUTPUTS_FIELD_NUMBER;
      hash = (53 * hash) + getByteStreamOutputsList().hashCode();
    }
    hash = (37 * hash) + MIME_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + getMimeType().hashCode();
    hash = (37 * hash) + DETECTED_LANGUAGE_CODE_FIELD_NUMBER;
    hash = (53 * hash) + getDetectedLanguageCode().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parsing entry points. Each overload delegates to PARSER
  // (or the GeneratedMessageV3 stream helpers), which perform the actual
  // wire-format decoding.
  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.translate.v3.DocumentTranslation parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.translate.v3.DocumentTranslation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: builders are created from (or seeded with) the
  // shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.cloud.translate.v3.DocumentTranslation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // For the default instance an empty builder suffices; otherwise copy this
    // message's fields into a fresh builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A translated document message.
   * </pre>
   *
   * Protobuf type {@code google.cloud.translation.v3.DocumentTranslation}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.translation.v3.DocumentTranslation)
      com.google.cloud.translate.v3.DocumentTranslationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.translate.v3.TranslationServiceProto
          .internal_static_google_cloud_translation_v3_DocumentTranslation_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.translate.v3.TranslationServiceProto
          .internal_static_google_cloud_translation_v3_DocumentTranslation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.translate.v3.DocumentTranslation.class,
              com.google.cloud.translate.v3.DocumentTranslation.Builder.class);
    }

    // Construct using com.google.cloud.translate.v3.DocumentTranslation.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field and the presence bits (bitField0_) to defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class);
      mimeType_ = "";
      detectedLanguageCode_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.translate.v3.TranslationServiceProto
          .internal_static_google_cloud_translation_v3_DocumentTranslation_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.translate.v3.DocumentTranslation getDefaultInstanceForType() {
      return com.google.cloud.translate.v3.DocumentTranslation.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.translate.v3.DocumentTranslation build() {
      com.google.cloud.translate.v3.DocumentTranslation result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.translate.v3.DocumentTranslation buildPartial() {
      com.google.cloud.translate.v3.DocumentTranslation result =
          new com.google.cloud.translate.v3.DocumentTranslation(this);
      // Only copy fields that have been explicitly set on this builder.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the result message. Bit 0x1 = byteStreamOutputs,
    // 0x2 = mimeType, 0x4 = detectedLanguageCode.
    private void buildPartial0(com.google.cloud.translate.v3.DocumentTranslation result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        byteStreamOutputs_.makeImmutable();
        result.byteStreamOutputs_ = byteStreamOutputs_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.mimeType_ = mimeType_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.detectedLanguageCode_ = detectedLanguageCode_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.translate.v3.DocumentTranslation) {
        return mergeFrom((com.google.cloud.translate.v3.DocumentTranslation) other);
      } else {
        // Fall back to the reflective, descriptor-based merge.
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges all set fields of `other` into this builder; repeated elements are
    // appended, scalar fields are overwritten when non-empty in `other`.
    public Builder mergeFrom(com.google.cloud.translate.v3.DocumentTranslation other) {
      if (other == com.google.cloud.translate.v3.DocumentTranslation.getDefaultInstance())
        return this;
      if (!other.byteStreamOutputs_.isEmpty()) {
        if (byteStreamOutputs_.isEmpty()) {
          // Share the other message's immutable list instead of copying.
          byteStreamOutputs_ = other.byteStreamOutputs_;
          byteStreamOutputs_.makeImmutable();
          bitField0_ |= 0x00000001;
        } else {
          ensureByteStreamOutputsIsMutable();
          byteStreamOutputs_.addAll(other.byteStreamOutputs_);
        }
        onChanged();
      }
      if (!other.getMimeType().isEmpty()) {
        mimeType_ = other.mimeType_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getDetectedLanguageCode().isEmpty()) {
        detectedLanguageCode_ = other.detectedLanguageCode_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parsing loop: tag values 10/18/26 are fields 1-3 with
    // length-delimited wire type; anything else is kept as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.protobuf.ByteString v = input.readBytes();
                ensureByteStreamOutputsIsMutable();
                byteStreamOutputs_.add(v);
                break;
              } // case 10
            case 18:
              {
                mimeType_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                detectedLanguageCode_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.Internal.ProtobufList<com.google.protobuf.ByteString>
        byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class);

    // Copy-on-write: replaces the list with a mutable copy before mutation.
    private void ensureByteStreamOutputsIsMutable() {
      if (!byteStreamOutputs_.isModifiable()) {
        byteStreamOutputs_ = makeMutableCopy(byteStreamOutputs_);
      }
      bitField0_ |= 0x00000001;
    }
    /**
     *
     *
     * <pre>
     * The array of translated documents. It is expected to be size 1 for now. We
     * may produce multiple translated documents in the future for other type of
     * file formats.
     * </pre>
     *
     * <code>repeated bytes byte_stream_outputs = 1;</code>
     *
     * @return A list containing the byteStreamOutputs.
     */
    public java.util.List<com.google.protobuf.ByteString> getByteStreamOutputsList() {
      byteStreamOutputs_.makeImmutable();
      return byteStreamOutputs_;
    }
    /**
     *
     *
     * <pre>
     * The array of translated documents. It is expected to be size 1 for now. We
     * may produce multiple translated documents in the future for other type of
     * file formats.
     * </pre>
     *
     * <code>repeated bytes byte_stream_outputs = 1;</code>
     *
     * @return The count of byteStreamOutputs.
     */
    public int getByteStreamOutputsCount() {
      return byteStreamOutputs_.size();
    }
    /**
     *
     *
     * <pre>
     * The array of translated documents. It is expected to be size 1 for now. We
     * may produce multiple translated documents in the future for other type of
     * file formats.
     * </pre>
     *
     * <code>repeated bytes byte_stream_outputs = 1;</code>
     *
     * @param index The index of the element to return.
     * @return The byteStreamOutputs at the given index.
     */
    public com.google.protobuf.ByteString getByteStreamOutputs(int index) {
      return byteStreamOutputs_.get(index);
    }
    /**
     *
     *
     * <pre>
     * The array of translated documents. It is expected to be size 1 for now. We
     * may produce multiple translated documents in the future for other type of
     * file formats.
     * </pre>
     *
     * <code>repeated bytes byte_stream_outputs = 1;</code>
     *
     * @param index The index to set the value at.
     * @param value The byteStreamOutputs to set.
     * @return This builder for chaining.
     */
    public Builder setByteStreamOutputs(int index, com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureByteStreamOutputsIsMutable();
      byteStreamOutputs_.set(index, value);
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The array of translated documents. It is expected to be size 1 for now. We
     * may produce multiple translated documents in the future for other type of
     * file formats.
     * </pre>
     *
     * <code>repeated bytes byte_stream_outputs = 1;</code>
     *
     * @param value The byteStreamOutputs to add.
     * @return This builder for chaining.
     */
    public Builder addByteStreamOutputs(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureByteStreamOutputsIsMutable();
      byteStreamOutputs_.add(value);
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The array of translated documents. It is expected to be size 1 for now. We
     * may produce multiple translated documents in the future for other type of
     * file formats.
     * </pre>
     *
     * <code>repeated bytes byte_stream_outputs = 1;</code>
     *
     * @param values The byteStreamOutputs to add.
     * @return This builder for chaining.
     */
    public Builder addAllByteStreamOutputs(
        java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
      ensureByteStreamOutputsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, byteStreamOutputs_);
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The array of translated documents. It is expected to be size 1 for now. We
     * may produce multiple translated documents in the future for other type of
     * file formats.
     * </pre>
     *
     * <code>repeated bytes byte_stream_outputs = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearByteStreamOutputs() {
      byteStreamOutputs_ = emptyList(com.google.protobuf.ByteString.class);
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    private java.lang.Object mimeType_ = "";
    /**
     *
     *
     * <pre>
     * The translated document's mime type.
     * </pre>
     *
     * <code>string mime_type = 2;</code>
     *
     * @return The mimeType.
     */
    public java.lang.String getMimeType() {
      java.lang.Object ref = mimeType_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        mimeType_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The translated document's mime type.
     * </pre>
     *
     * <code>string mime_type = 2;</code>
     *
     * @return The bytes for mimeType.
     */
    public com.google.protobuf.ByteString getMimeTypeBytes() {
      java.lang.Object ref = mimeType_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        mimeType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The translated document's mime type.
     * </pre>
     *
     * <code>string mime_type = 2;</code>
     *
     * @param value The mimeType to set.
     * @return This builder for chaining.
     */
    public Builder setMimeType(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      mimeType_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The translated document's mime type.
     * </pre>
     *
     * <code>string mime_type = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMimeType() {
      mimeType_ = getDefaultInstance().getMimeType();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The translated document's mime type.
     * </pre>
     *
     * <code>string mime_type = 2;</code>
     *
     * @param value The bytes for mimeType to set.
     * @return This builder for chaining.
     */
    public Builder setMimeTypeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      mimeType_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object detectedLanguageCode_ = "";
    /**
     *
     *
     * <pre>
     * The detected language for the input document.
     * If the user did not provide the source language for the input document,
     * this field will have the language code automatically detected. If the
     * source language was passed, auto-detection of the language does not occur
     * and this field is empty.
     * </pre>
     *
     * <code>string detected_language_code = 3;</code>
     *
     * @return The detectedLanguageCode.
     */
    public java.lang.String getDetectedLanguageCode() {
      java.lang.Object ref = detectedLanguageCode_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        detectedLanguageCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The detected language for the input document.
     * If the user did not provide the source language for the input document,
     * this field will have the language code automatically detected. If the
     * source language was passed, auto-detection of the language does not occur
     * and this field is empty.
     * </pre>
     *
     * <code>string detected_language_code = 3;</code>
     *
     * @return The bytes for detectedLanguageCode.
     */
    public com.google.protobuf.ByteString getDetectedLanguageCodeBytes() {
      java.lang.Object ref = detectedLanguageCode_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        detectedLanguageCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The detected language for the input document.
     * If the user did not provide the source language for the input document,
     * this field will have the language code automatically detected. If the
     * source language was passed, auto-detection of the language does not occur
     * and this field is empty.
     * </pre>
     *
     * <code>string detected_language_code = 3;</code>
     *
     * @param value The detectedLanguageCode to set.
     * @return This builder for chaining.
     */
    public Builder setDetectedLanguageCode(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      detectedLanguageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The detected language for the input document.
     * If the user did not provide the source language for the input document,
     * this field will have the language code automatically detected. If the
     * source language was passed, auto-detection of the language does not occur
     * and this field is empty.
     * </pre>
     *
     * <code>string detected_language_code = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDetectedLanguageCode() {
      detectedLanguageCode_ = getDefaultInstance().getDetectedLanguageCode();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The detected language for the input document.
     * If the user did not provide the source language for the input document,
     * this field will have the language code automatically detected. If the
     * source language was passed, auto-detection of the language does not occur
     * and this field is empty.
     * </pre>
     *
     * <code>string detected_language_code = 3;</code>
     *
     * @param value The bytes for detectedLanguageCode to set.
     * @return This builder for chaining.
     */
    public Builder setDetectedLanguageCodeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      detectedLanguageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.translation.v3.DocumentTranslation)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DocumentTranslation)
  // Singleton default instance: all fields at their proto3 default values.
  private static final com.google.cloud.translate.v3.DocumentTranslation DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.translate.v3.DocumentTranslation();
  }

  public static com.google.cloud.translate.v3.DocumentTranslation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser used by all parseFrom(...) overloads. Parsing delegates to a
  // fresh Builder so partially-read data can still be surfaced via
  // setUnfinishedMessage on failure.
  private static final com.google.protobuf.Parser<DocumentTranslation> PARSER =
      new com.google.protobuf.AbstractParser<DocumentTranslation>() {
        @java.lang.Override
        public DocumentTranslation parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<DocumentTranslation> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DocumentTranslation> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.translate.v3.DocumentTranslation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/graal | 36,199 | tools/src/com.oracle.truffle.tools.dap/src/com/oracle/truffle/tools/dap/server/DebugProtocolServerImpl.java | /*
* Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.truffle.tools.dap.server;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.InstrumentInfo;
import com.oracle.truffle.api.TruffleContext;
import com.oracle.truffle.api.debug.DebugException;
import com.oracle.truffle.api.debug.DebugValue;
import com.oracle.truffle.api.debug.Debugger;
import com.oracle.truffle.api.debug.DebuggerSession;
import com.oracle.truffle.api.debug.SourceElement;
import com.oracle.truffle.api.debug.StepConfig;
import com.oracle.truffle.api.debug.SuspendAnchor;
import com.oracle.truffle.api.debug.SuspendedCallback;
import com.oracle.truffle.api.debug.SuspendedEvent;
import com.oracle.truffle.api.debug.SuspensionFilter;
import com.oracle.truffle.api.instrumentation.ContextsListener;
import com.oracle.truffle.api.instrumentation.EventBinding;
import com.oracle.truffle.api.instrumentation.TruffleInstrument;
import com.oracle.truffle.api.nodes.LanguageInfo;
import com.oracle.truffle.api.source.Source;
import com.oracle.truffle.api.source.SourceSection;
import com.oracle.truffle.tools.dap.instrument.Enabler;
import com.oracle.truffle.tools.dap.instrument.OutputConsumerInstrument;
import com.oracle.truffle.tools.dap.types.AttachRequestArguments;
import com.oracle.truffle.tools.dap.types.BreakpointLocationsArguments;
import com.oracle.truffle.tools.dap.types.BreakpointLocationsResponse;
import com.oracle.truffle.tools.dap.types.Capabilities;
import com.oracle.truffle.tools.dap.types.ConfigurationDoneArguments;
import com.oracle.truffle.tools.dap.types.ContinueArguments;
import com.oracle.truffle.tools.dap.types.ContinueResponse;
import com.oracle.truffle.tools.dap.types.DebugProtocolClient;
import com.oracle.truffle.tools.dap.types.DebugProtocolServer;
import com.oracle.truffle.tools.dap.types.DisconnectArguments;
import com.oracle.truffle.tools.dap.types.EvaluateArguments;
import com.oracle.truffle.tools.dap.types.EvaluateResponse;
import com.oracle.truffle.tools.dap.types.ExceptionBreakpointsFilter;
import com.oracle.truffle.tools.dap.types.ExceptionInfoArguments;
import com.oracle.truffle.tools.dap.types.ExceptionInfoResponse;
import com.oracle.truffle.tools.dap.types.InitializeRequestArguments;
import com.oracle.truffle.tools.dap.types.LaunchRequestArguments;
import com.oracle.truffle.tools.dap.types.LoadedSourcesArguments;
import com.oracle.truffle.tools.dap.types.LoadedSourcesResponse;
import com.oracle.truffle.tools.dap.types.NextArguments;
import com.oracle.truffle.tools.dap.types.OutputEvent;
import com.oracle.truffle.tools.dap.types.PauseArguments;
import com.oracle.truffle.tools.dap.types.Scope;
import com.oracle.truffle.tools.dap.types.ScopesArguments;
import com.oracle.truffle.tools.dap.types.ScopesResponse;
import com.oracle.truffle.tools.dap.types.SetBreakpointsArguments;
import com.oracle.truffle.tools.dap.types.SetBreakpointsResponse;
import com.oracle.truffle.tools.dap.types.SetExceptionBreakpointsArguments;
import com.oracle.truffle.tools.dap.types.SetFunctionBreakpointsArguments;
import com.oracle.truffle.tools.dap.types.SetFunctionBreakpointsResponse;
import com.oracle.truffle.tools.dap.types.SetVariableArguments;
import com.oracle.truffle.tools.dap.types.SetVariableResponse;
import com.oracle.truffle.tools.dap.types.SourceArguments;
import com.oracle.truffle.tools.dap.types.SourceResponse;
import com.oracle.truffle.tools.dap.types.StackFrame;
import com.oracle.truffle.tools.dap.types.StackTraceArguments;
import com.oracle.truffle.tools.dap.types.StackTraceResponse;
import com.oracle.truffle.tools.dap.types.StepInArguments;
import com.oracle.truffle.tools.dap.types.StepOutArguments;
import com.oracle.truffle.tools.dap.types.TerminatedEvent;
import com.oracle.truffle.tools.dap.types.ThreadsResponse;
import com.oracle.truffle.tools.dap.types.Variable;
import com.oracle.truffle.tools.dap.types.VariablesArguments;
import com.oracle.truffle.tools.dap.types.VariablesResponse;
import org.graalvm.shadowed.org.json.JSONArray;
import org.graalvm.shadowed.org.json.JSONObject;
import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.Phaser;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.logging.Level;
/**
* A {@link DebugProtocolServer} implementation using TCP sockets as transportation layer for the
* JSON-RPC requests.
*/
public final class DebugProtocolServerImpl extends DebugProtocolServer {
    // Step configuration shared by all stepping requests: suspend after ROOT source elements.
    private static final StepConfig STEP_CONFIG = StepConfig.newBuilder().suspendAnchors(SourceElement.ROOT, SuspendAnchor.AFTER).build();

    private final ExecutionContext context;
    // The connected DAP client; set once a client connection is established.
    private volatile DebugProtocolClient client;
    private volatile DebuggerSession debuggerSession;
    // Enables/disables capture of the guest application's stdout/stderr.
    private final Enabler ioEnabler;
    private volatile boolean launched; // true when launched, false when attached
    private volatile boolean disposed = false;
    // Callbacks to run when this server is disposed.
    private final List<Runnable> runOnDispose = new CopyOnWriteArrayList<>();
    // Directories used to resolve sources that are not loaded from a literal URI.
    private final List<URI> sourcePath;
    private volatile OneTimeExecutor clientConnectionExecutor;
    /**
     * Creates the server and wires it into the instrumentation environment.
     *
     * @param debugBreak when {@code true}, a debugger session is started immediately and the first
     *            execution is suspended.
     * @param waitAttached when {@code true}, guest execution blocks until the client grants the run
     *            permission.
     * @param inspectInitialization when {@code true}, waiting happens already at language context
     *            creation (so initialization code can be inspected); otherwise at initialization.
     * @param sourcePath directories used to resolve source locations.
     */
    private DebugProtocolServerImpl(ExecutionContext context, final boolean debugBreak, final boolean waitAttached, @SuppressWarnings("unused") final boolean inspectInitialization,
                    final List<URI> sourcePath) {
        this.context = context;
        this.sourcePath = sourcePath;
        if (debugBreak) {
            // Suspend on the very first executed statement.
            debuggerSession = startDebuggerSession();
            context.initSession(debuggerSession);
            debuggerSession.suspendNextExecution();
        }
        if (debugBreak || waitAttached) {
            final AtomicReference<EventBinding<?>> execEnter = new AtomicReference<>();
            final AtomicBoolean disposeBinding = new AtomicBoolean(false);
            // Block guest execution until the client gives the run permission; the
            // listener disposes itself after the permission is granted once.
            execEnter.set(context.getEnv().getInstrumenter().attachContextsListener(new ContextsListener() {
                @Override
                public void onContextCreated(TruffleContext ctx) {
                }

                @Override
                public void onLanguageContextCreated(TruffleContext ctx, LanguageInfo language) {
                    if (inspectInitialization) {
                        waitForRunPermission();
                    }
                }

                @Override
                public void onLanguageContextInitialized(TruffleContext ctx, LanguageInfo language) {
                    if (!inspectInitialization) {
                        waitForRunPermission();
                    }
                }

                @Override
                public void onLanguageContextFinalized(TruffleContext ctx, LanguageInfo language) {
                }

                @Override
                public void onLanguageContextDisposed(TruffleContext ctx, LanguageInfo language) {
                }

                @Override
                public void onContextClosed(TruffleContext ctx) {
                }

                @CompilerDirectives.TruffleBoundary
                private void waitForRunPermission() {
                    try {
                        context.waitForRunPermission();
                    } catch (InterruptedException ex) {
                    }
                    final EventBinding<?> binding = execEnter.getAndSet(null);
                    if (binding != null) {
                        binding.dispose();
                    } else {
                        // attachContextsListener has not returned yet; request disposal below.
                        disposeBinding.set(true);
                    }
                }
            }, true));
            if (disposeBinding.get()) {
                // Permission was granted while the listener was being attached.
                execEnter.get().dispose();
            }
        }
        // Route the guest application's stdout/stderr to the client as output events.
        InstrumentInfo instrumentInfo = context.getEnv().getInstruments().get(OutputConsumerInstrument.ID);
        ioEnabler = context.getEnv().lookup(instrumentInfo, Enabler.class);
        ioEnabler.enable();
        OutputHandler oh = context.getEnv().lookup(instrumentInfo, OutputHandler.Provider.class).getOutputHandler();
        ConsoleOutputListener outL = new ConsoleOutputListener("stdout");
        ConsoleOutputListener errL = new ConsoleOutputListener("stderr");
        oh.setOutListener(outL);
        oh.setErrListener(errL);
    }
    /**
     * Creates a new DAP server bound to the given execution context.
     *
     * @see #DebugProtocolServerImpl(ExecutionContext, boolean, boolean, boolean, List)
     */
    public static DebugProtocolServerImpl create(ExecutionContext context, final boolean debugBreak, final boolean waitAttached, final boolean inspectInitialization, final List<URI> sourcePath) {
        return new DebugProtocolServerImpl(context, debugBreak, waitAttached, inspectInitialization, sourcePath);
    }
    @Override
    public CompletableFuture<Capabilities> initialize(InitializeRequestArguments args) {
        // Record the client's line/column numbering convention for later translation.
        context.setLinesStartAt1(args.getLinesStartAt1());
        context.setColumnsStartAt1(args.getColumnsStartAt1());
        ExceptionBreakpointsFilter[] exceptionBreakpointFilters = new ExceptionBreakpointsFilter[]{
                        ExceptionBreakpointsFilter.create("all", "All Exceptions"),
                        ExceptionBreakpointsFilter.create("uncaught", "Uncaught Exceptions")
        };
        // Advertise the capabilities this server supports.
        final CompletableFuture<Capabilities> future = CompletableFuture.completedFuture(Capabilities.create() //
                        .setExceptionBreakpointFilters(Arrays.asList(exceptionBreakpointFilters)) //
                        .setSupportsConfigurationDoneRequest(true) //
                        .setSupportsFunctionBreakpoints(true) //
                        .setSupportsConditionalBreakpoints(true) //
                        .setSupportsHitConditionalBreakpoints(true) //
                        .setSupportsSetVariable(true) //
                        .setSupportsExceptionInfoRequest(true) //
                        .setSupportsLoadedSourcesRequest(true) //
                        .setSupportsLogPoints(true) //
                        .setSupportsBreakpointLocationsRequest(true));
        // Send the 'initialized' event asynchronously, after the response is delivered.
        future.thenRunAsync(() -> {
            client.initialized();
        });
        return future;
    }
    /**
     * DAP configurationDone request: all breakpoints are set, so release the execution if it
     * was waiting for the debugger.
     */
    @Override
    public CompletableFuture<Void> configurationDone(ConfigurationDoneArguments args) {
        return CompletableFuture.runAsync(() -> context.doRunIfWaitingForDebugger());
    }
@Override
public CompletableFuture<Void> launch(LaunchRequestArguments args) {
return CompletableFuture.runAsync(() -> {
JSONObject info = (JSONObject) args.get("graalVMLaunchInfo");
if (info != null) {
StringBuilder sb = new StringBuilder(info.getString("exec"));
JSONArray argsInfo = info.getJSONArray("args");
for (int i = 0; i < argsInfo.length(); i++) {
sb.append(' ').append(argsInfo.getString(i));
}
client.output(OutputEvent.EventBody.create(sb.toString()));
}
client.output(OutputEvent.EventBody.create("Debugger attached.").setCategory("stderr"));
launched = true;
});
}
    /**
     * DAP attach request: only reports the attachment on the client console; the execution
     * is already running in-process.
     */
    @Override
    public CompletableFuture<Void> attach(AttachRequestArguments args) {
        return CompletableFuture.runAsync(() -> {
            client.output(OutputEvent.EventBody.create("Debugger attached.").setCategory("stderr"));
        });
    }
    /**
     * DAP disconnect request. Tears down in a fixed order: mark disposed and detach the
     * debugger session under the lock, stop I/O forwarding, close the session, dispose the
     * context, acknowledge the request, and finally (only for launched sessions) cancel all
     * Truffle contexts to terminate the execution.
     */
    @Override
    public CompletableFuture<Void> disconnect(DisconnectArguments args, Consumer<? super Void> responseConsumer) {
        return CompletableFuture.runAsync(() -> {
            DebuggerSession session;
            // Synchronized with connect(), which may be installing the session concurrently.
            synchronized (DebugProtocolServerImpl.this) {
                disposed = true;
                session = debuggerSession;
                debuggerSession = null;
            }
            ioEnabler.disable();
            if (session != null) {
                session.close();
            }
            context.dispose();
            // The response must be sent before the contexts are cancelled, otherwise the
            // client would never receive it.
            responseConsumer.accept(null);
            if (launched) {
                // Cancel all contexts to terminate the execution
                AllContextsCancel cancel = new AllContextsCancel();
                EventBinding<ContextsListener> binding = context.getEnv().getInstrumenter().attachContextsListener(cancel, true);
                cancel.waitForAllCanceled();
                binding.dispose();
            }
        });
    }
    /**
     * Disposes the server: notifies the client that the session terminated, runs all
     * registered dispose hooks, and joins the client connection thread.
     */
    public void dispose() {
        // NOTE(review): 'disposed' is read/written here without the lock used in
        // disconnect()/connect() - presumably dispose() is only called from a single
        // thread; confirm before changing.
        if (disposed) {
            return;
        }
        disposed = true;
        DebugProtocolClient theClient = client;
        if (theClient != null) {
            theClient.terminated(TerminatedEvent.EventBody.create());
        }
        for (Runnable r : runOnDispose) {
            r.run();
        }
        clientConnectionExecutor.shutDownAndJoin();
    }
    // Registers a hook executed when this server is disposed (see dispose()).
    private void onDispose(Runnable r) {
        runOnDispose.add(r);
    }
    /**
     * DAP breakpointLocations request: delegates to the breakpoints handler to list the
     * possible breakpoint positions in the given source range.
     */
    @Override
    public CompletableFuture<BreakpointLocationsResponse.ResponseBody> breakpointLocations(BreakpointLocationsArguments args) {
        return CompletableFuture.completedFuture(BreakpointLocationsResponse.ResponseBody.create(context.getBreakpointsHandler().breakpointLocations(args)));
    }
    /**
     * DAP setBreakpoints request: replaces all line breakpoints of one source. Handler
     * validation failures are reported as a failed request rather than thrown.
     */
    @Override
    public CompletableFuture<SetBreakpointsResponse.ResponseBody> setBreakpoints(SetBreakpointsArguments args) {
        try {
            return CompletableFuture.completedFuture(SetBreakpointsResponse.ResponseBody.create(context.getBreakpointsHandler().setBreakpoints(args)));
        } catch (ExceptionWithMessage ex) {
            return CompletableFuture.failedFuture(ex);
        }
    }
    /**
     * DAP setFunctionBreakpoints request: replaces all function breakpoints.
     */
    @Override
    public CompletableFuture<SetFunctionBreakpointsResponse.ResponseBody> setFunctionBreakpoints(SetFunctionBreakpointsArguments args) {
        return CompletableFuture.completedFuture(SetFunctionBreakpointsResponse.ResponseBody.create(context.getBreakpointsHandler().setFunctionBreakpoints(args)));
    }
@Override
public CompletableFuture<Void> setExceptionBreakpoints(SetExceptionBreakpointsArguments args) {
if (args.getFilters().indexOf("all") >= 0) {
context.getBreakpointsHandler().setExceptionBreakpoint(true, true);
} else if (args.getFilters().indexOf("uncaught") >= 0) {
context.getBreakpointsHandler().setExceptionBreakpoint(false, true);
} else {
context.getBreakpointsHandler().setExceptionBreakpoint(false, false);
}
return CompletableFuture.completedFuture(null);
}
@Override
public CompletableFuture<ContinueResponse.ResponseBody> doContinue(ContinueArguments args, Consumer<? super ContinueResponse.ResponseBody> responseConsumer) {
CompletableFuture<ContinueResponse.ResponseBody> future = new CompletableFuture<>();
context.getThreadsHandler().executeInSuspendedThread(args.getThreadId(), (info) -> {
if (info == null) {
future.completeExceptionally(Errors.invalidThread(args.getThreadId()));
return false;
}
info.getSuspendedEvent().prepareContinue();
ContinueResponse.ResponseBody response = ContinueResponse.ResponseBody.create().setAllThreadsContinued(false);
responseConsumer.accept(response);
future.complete(response);
return true;
});
return future;
}
@Override
public CompletableFuture<Void> next(NextArguments args, Consumer<? super Void> responseConsumer) {
CompletableFuture<Void> future = new CompletableFuture<>();
context.getThreadsHandler().executeInSuspendedThread(args.getThreadId(), (info) -> {
if (info == null) {
future.completeExceptionally(Errors.invalidThread(args.getThreadId()));
return false;
}
info.getSuspendedEvent().prepareStepOver(STEP_CONFIG);
responseConsumer.accept(null);
future.complete(null);
return true;
});
return future;
}
@Override
public CompletableFuture<Void> stepIn(StepInArguments args, Consumer<? super Void> responseConsumer) {
CompletableFuture<Void> future = new CompletableFuture<>();
context.getThreadsHandler().executeInSuspendedThread(args.getThreadId(), (info) -> {
if (info == null) {
future.completeExceptionally(Errors.invalidThread(args.getThreadId()));
return false;
}
info.getSuspendedEvent().prepareStepInto(STEP_CONFIG);
responseConsumer.accept(null);
future.complete(null);
return true;
});
return future;
}
@Override
public CompletableFuture<Void> stepOut(StepOutArguments args, Consumer<? super Void> responseConsumer) {
CompletableFuture<Void> future = new CompletableFuture<>();
context.getThreadsHandler().executeInSuspendedThread(args.getThreadId(), (info) -> {
if (info == null) {
future.completeExceptionally(Errors.invalidThread(args.getThreadId()));
return false;
}
info.getSuspendedEvent().prepareStepOut(STEP_CONFIG);
responseConsumer.accept(null);
future.complete(null);
return true;
});
return future;
}
@Override
public CompletableFuture<Void> pause(PauseArguments args) {
CompletableFuture<Void> future = new CompletableFuture<>();
if (context.getThreadsHandler().pause(args.getThreadId())) {
future.complete(null);
} else {
future.completeExceptionally(Errors.invalidThread(args.getThreadId()));
}
return future;
}
@Override
public CompletableFuture<StackTraceResponse.ResponseBody> stackTrace(StackTraceArguments args) {
CompletableFuture<StackTraceResponse.ResponseBody> future = new CompletableFuture<>();
context.getThreadsHandler().executeInSuspendedThread(args.getThreadId(), (info) -> {
if (info == null) {
future.completeExceptionally(Errors.noCallStackAvailable());
} else {
List<StackFrame> stackTrace = context.getStackFramesHandler().getStackTrace(info);
int startIdx = args.getStartFrame() != null ? args.getStartFrame() : 0;
int endIdx = startIdx + (args.getLevels() != null ? args.getLevels() : stackTrace.size());
if (startIdx > 0 || endIdx < stackTrace.size()) {
stackTrace = stackTrace.subList(startIdx, endIdx);
}
future.complete(StackTraceResponse.ResponseBody.create(stackTrace).setTotalFrames(stackTrace.size()));
}
return false;
});
return future;
}
@Override
public CompletableFuture<ScopesResponse.ResponseBody> scopes(ScopesArguments args) {
CompletableFuture<ScopesResponse.ResponseBody> future = new CompletableFuture<>();
context.getThreadsHandler().executeInSuspendedThread(args.getFrameId(), (info) -> {
List<Scope> scopes = info != null ? context.getStackFramesHandler().getScopes(info, args.getFrameId()) : null;
if (scopes == null) {
future.completeExceptionally(Errors.stackFrameNotValid());
} else {
future.complete(ScopesResponse.ResponseBody.create(scopes));
}
return false;
});
return future;
}
@Override
public CompletableFuture<VariablesResponse.ResponseBody> variables(VariablesArguments args) {
CompletableFuture<VariablesResponse.ResponseBody> future = new CompletableFuture<>();
context.getThreadsHandler().executeInSuspendedThread(args.getVariablesReference(), (info) -> {
List<Variable> variables = info != null ? context.getVariablesHandler().getVariables(info, args) : null;
if (variables == null) {
variables = Collections.emptyList();
}
future.complete(VariablesResponse.ResponseBody.create(variables));
return false;
});
return future;
}
    /**
     * DAP setVariable request: assigns a new value to a variable in the scope identified by
     * the variables reference. Fails with set-value-not-supported when the reference is
     * invalid or the variable is read-only, and maps any evaluation error to a failed
     * request carrying the exception message.
     */
    @Override
    public CompletableFuture<SetVariableResponse.ResponseBody> setVariable(SetVariableArguments args) {
        CompletableFuture<SetVariableResponse.ResponseBody> future = new CompletableFuture<>();
        context.getThreadsHandler().executeInSuspendedThread(args.getVariablesReference(), (info) -> {
            try {
                Variable var = info != null ? VariablesHandler.setVariable(info, args) : null;
                if (var == null) {
                    future.completeExceptionally(Errors.setValueNotSupported());
                } else {
                    // Report the updated value together with its structural metadata.
                    future.complete(SetVariableResponse.ResponseBody.create(var.getValue()).setType(var.getType()).setVariablesReference(var.getVariablesReference()).setIndexedVariables(
                                    var.getIndexedVariables()).setNamedVariables(var.getNamedVariables()));
                }
            } catch (Exception e) {
                future.completeExceptionally(Errors.errorFromEvaluate(e.getMessage()));
            }
            return false;
        });
        return future;
    }
@Override
public CompletableFuture<SourceResponse.ResponseBody> source(SourceArguments args) {
CompletableFuture<SourceResponse.ResponseBody> future = new CompletableFuture<>();
Source source;
Integer sourceReference = args.getSource().getSourceReference();
if (sourceReference != null && sourceReference > 0) {
source = context.getLoadedSourcesHandler().getSource(sourceReference);
} else {
source = context.getLoadedSourcesHandler().getSource(args.getSource().getPath());
}
if (source == null) {
future.completeExceptionally(Errors.sourceRequestIllegalHandle());
} else if (!source.hasCharacters()) {
future.completeExceptionally(Errors.sourceRequestCouldNotRetrieveContent());
} else {
future.complete(SourceResponse.ResponseBody.create(source.getCharacters().toString()).setMimeType(source.getMimeType()));
}
return future;
}
    /**
     * DAP threads request: lists all threads known to the threads handler.
     */
    @Override
    public CompletableFuture<ThreadsResponse.ResponseBody> threads() {
        return CompletableFuture.completedFuture(ThreadsResponse.ResponseBody.create(context.getThreadsHandler().getThreads()));
    }
    /**
     * DAP loadedSources request: lists all sources loaded so far.
     */
    @Override
    public CompletableFuture<LoadedSourcesResponse.ResponseBody> loadedSources(LoadedSourcesArguments args) {
        return CompletableFuture.completedFuture(LoadedSourcesResponse.ResponseBody.create(context.getLoadedSourcesHandler().getLoadedSources()));
    }
    /**
     * DAP evaluate request: evaluates an expression in the context of a stack frame.
     */
    @Override
    public CompletableFuture<EvaluateResponse.ResponseBody> evaluate(EvaluateArguments args) {
        CompletableFuture<EvaluateResponse.ResponseBody> future = new CompletableFuture<>();
        Integer frameId = args.getFrameId();
        // NOTE(review): a frame id (or 0 when absent) is passed where other requests pass
        // thread ids - presumably executeInSuspendedThread resolves both kinds of ids;
        // confirm in ThreadsHandler.
        context.getThreadsHandler().executeInSuspendedThread(frameId != null ? frameId : 0, (info) -> {
            if (info == null) {
                future.completeExceptionally(Errors.stackFrameNotValid());
            } else {
                try {
                    Variable var = StackFramesHandler.evaluateOnStackFrame(info, args.getFrameId(), args.getExpression());
                    if (var == null) {
                        future.completeExceptionally(Errors.stackFrameNotValid());
                    } else {
                        future.complete(EvaluateResponse.ResponseBody.create(var.getValue(), var.getVariablesReference()).setType(var.getType()).setIndexedVariables(
                                        var.getIndexedVariables()).setNamedVariables(var.getNamedVariables()));
                    }
                } catch (Exception e) {
                    // Evaluation errors are surfaced as failed requests with the message.
                    future.completeExceptionally(Errors.errorFromEvaluate(e.getMessage()));
                }
            }
            return false;
        });
        return future;
    }
    /**
     * DAP exceptionInfo request: describes the exception the given thread is suspended on.
     * The exception id is the meta simple name of the exception object's meta object (or
     * "Error" when unavailable); the break mode is always reported as "unhandled".
     */
    @Override
    public CompletableFuture<ExceptionInfoResponse.ResponseBody> exceptionInfo(ExceptionInfoArguments args) {
        CompletableFuture<ExceptionInfoResponse.ResponseBody> future = new CompletableFuture<>();
        context.getThreadsHandler().executeInSuspendedThread(args.getThreadId(), (info) -> {
            if (info == null) {
                future.completeExceptionally(Errors.invalidThread(args.getThreadId()));
            } else {
                DebugException exception = info.getSuspendedEvent().getException();
                if (exception == null) {
                    future.completeExceptionally(Errors.noStoredException());
                } else {
                    DebugValue exceptionObject = exception.getExceptionObject();
                    String description = exceptionObject != null && exceptionObject.isReadable() ? exceptionObject.toDisplayString() : null;
                    DebugValue metaObject = exceptionObject != null ? exceptionObject.getMetaObject() : null;
                    String exceptionId = metaObject != null ? metaObject.getMetaSimpleName() : null;
                    future.complete(ExceptionInfoResponse.ResponseBody.create(exceptionId != null ? exceptionId : "Error", "unhandled").setDescription(description));
                }
            }
            return false;
        });
        return future;
    }
    /**
     * Connects the DAP client and lazily starts the debugger session. The session is
     * installed under the lock so that a concurrent disconnect() cannot leave a live
     * session behind: if disposal raced ahead, the freshly created session is closed
     * immediately instead of being installed.
     */
    @Override
    protected void connect(DebugProtocolClient clnt) {
        this.client = clnt;
        if (debuggerSession == null) {
            DebuggerSession session = startDebuggerSession();
            boolean isDisposed;
            synchronized (this) {
                isDisposed = disposed;
                if (!isDisposed) {
                    debuggerSession = session;
                }
            }
            if (isDisposed) {
                session.close();
            } else {
                context.initSession(session);
            }
        }
        context.initClient(client);
    }
    /**
     * Returns a logger proxy that forwards to the context's logger.
     */
    @Override
    public LoggerProxy getLogger() {
        return new LoggerProxy() {
            @Override
            public boolean isLoggable(Level level) {
                return context.getLogger().isLoggable(level);
            }
            @Override
            public void log(Level level, String msg) {
                context.getLogger().log(level, msg);
            }
            @Override
            public void log(Level level, String msg, Throwable thrown) {
                context.getLogger().log(level, msg, thrown);
            }
        };
    }
/**
* The executors created by Executors do not allow to join the threads in the pool. System
* threads must be joined before Engine close and ExecutorService.awaitTermination() does not
* guarantee threads termination.
*/
private static class OneTimeExecutor implements Executor {
private final TruffleInstrument.Env env;
private final String threadName;
private Reference<Thread> thread;
private boolean shutDown;
OneTimeExecutor(TruffleInstrument.Env env, String threadName) {
this.env = env;
this.threadName = threadName;
}
public synchronized void execute(Runnable command) {
if (shutDown) {
return;
}
if (thread != null) {
throw new IllegalStateException("This is a one-time executor.");
}
Thread t = env.createSystemThread(command);
t.setName(threadName);
t.start();
thread = new WeakReference<>(t);
}
synchronized void shutDownAndJoin() {
Thread t = (thread != null) ? thread.get() : null;
if (t != null) {
t.interrupt();
try {
t.join();
} catch (InterruptedException ex) {
// Interrupted
}
}
shutDown = true;
}
}
    /**
     * Starts the server: accepts exactly one client connection on the given socket (on a
     * dedicated system thread) and serves DAP requests from it until the session ends or
     * the server is disposed.
     */
    public CompletableFuture<?> start(final ServerSocket serverSocket) {
        clientConnectionExecutor = new OneTimeExecutor(context.getEnv(), "DAP client connection thread");
        context.getInfo().println("[Graal DAP] Starting server and listening on " + serverSocket.getLocalSocketAddress());
        return CompletableFuture.runAsync(new Runnable() {
            @Override
            public void run() {
                // Set when dispose() closed the socket, to suppress the resulting
                // "Socket closed" exception from accept().
                AtomicBoolean terminated = new AtomicBoolean(false);
                try {
                    if (serverSocket.isClosed()) {
                        context.getErr().println("[Graal DAP] Server socket is closed.");
                        return;
                    }
                    // Close the server socket when the server is disposed, to unblock accept().
                    onDispose(() -> {
                        terminated.set(true);
                        try {
                            serverSocket.close();
                        } catch (IOException e) {
                            context.getErr().println("[Graal DAP] Error while closing the server socket: " + e.getLocalizedMessage());
                        }
                    });
                    if (disposed) {
                        // Disposed in the mean time.
                        return;
                    }
                    try (Socket clientSocket = serverSocket.accept()) {
                        context.getInfo().println("[Graal DAP] Client connected on " + clientSocket.getRemoteSocketAddress());
                        OneTimeExecutor dapRequestExecutor = new OneTimeExecutor(context.getEnv(), "DAP request handler");
                        Future<?> listenFuture = Session.connect(DebugProtocolServerImpl.this, clientSocket.getInputStream(), clientSocket.getOutputStream(), dapRequestExecutor);
                        try {
                            // Block until the DAP session with this client terminates.
                            listenFuture.get();
                        } catch (InterruptedException | ExecutionException e) {
                            context.getErr().println("[Graal DAP] Error: " + e.getLocalizedMessage());
                        } finally {
                            dapRequestExecutor.shutDownAndJoin();
                        }
                    }
                } catch (IOException e) {
                    if (terminated.get() && (e instanceof SocketException)) {
                        // We've terminated the socket, thus we ignore any exceptions from it.
                        // serverSocket.accept() will always throw "Socket closed" exception
                        // when serverSocket.close() is called.
                    } else {
                        context.getErr().println("[Graal DAP] Error while connecting to client: " + e.getLocalizedMessage());
                    }
                }
            }
        }, clientConnectionExecutor);
    }
    /**
     * Creates a Truffle debugger session suspending on roots and statements, configured
     * from the context's source path and initialization/internal inspection options.
     */
    private DebuggerSession startDebuggerSession() {
        Debugger tdbg = context.getEnv().lookup(context.getEnv().getInstruments().get("debugger"), Debugger.class);
        DebuggerSession session = tdbg.startSession(new SuspendedCallbackImpl(), SourceElement.ROOT, SourceElement.STATEMENT);
        session.setSourcePath(sourcePath);
        session.setSteppingFilter(SuspensionFilter.newBuilder() //
                        .ignoreLanguageContextInitialization(!context.isInspectInitialization()) //
                        .includeInternal(context.isInspectInternal()) //
                        .sourceSectionAvailableOnly(true).build());
        return session;
    }
    /**
     * Callback invoked by the Truffle debugger whenever execution suspends; forwards the
     * suspension to the threads handler after filtering out uninteresting root-node stops.
     */
    private final class SuspendedCallbackImpl implements SuspendedCallback {
        @Override
        public void onSuspend(SuspendedEvent event) {
            try {
                // Block until the client has allowed execution to run.
                context.waitForRunPermission();
            } catch (InterruptedException ex) {
                // NOTE(review): interrupt is swallowed and suspension proceeds - presumably
                // intentional so disposal can unblock this wait; confirm.
            }
            SourceSection ss = event.getSourceSection();
            if (debuggerSession == null) {
                // Debugger has been disabled while waiting
                return;
            }
            DebugValue returnValue = event.getReturnValue();
            if (event.hasSourceElement(SourceElement.ROOT) && event.getBreakpoints().isEmpty()) {
                if ((!event.hasSourceElement(SourceElement.STATEMENT) && event.getSuspendAnchor() == SuspendAnchor.BEFORE) ||
                                (event.getSuspendAnchor() == SuspendAnchor.AFTER && returnValue == null)) {
                    // We're at the begining of a `RootTag` node, or
                    // we're at the end of `RootTag` node and have no return value.
                    // We use `RootTag` to intercept return values of functions during stepping.
                    // But if there's no return value, there's no point in suspending at the end of
                    // a function. That would cause an unnecessary distraction.
                    event.prepareStepInto(STEP_CONFIG);
                    return;
                }
            }
            // Register the source and report the suspension to the client-facing handler.
            context.getLoadedSourcesHandler().assureLoaded(ss.getSource());
            context.getThreadsHandler().threadSuspended(Thread.currentThread(), event);
        }
    }
private class ConsoleOutputListener implements OutputHandler.Listener {
private final String category;
ConsoleOutputListener(String category) {
this.category = category;
}
@Override
public void outputText(String text) {
DebugProtocolClient debugClient = context.getClient();
if (client != null) {
OutputEvent.EventBody event = OutputEvent.EventBody.create(text);
event.setCategory(category);
debugClient.output(event);
}
}
}
    /**
     * Contexts listener that cancels every context it sees and lets a caller wait until
     * all of them have closed. A Phaser is used as a dynamic countdown: one party is
     * registered per created context (plus the waiting caller) and deregistered on close.
     */
    private static final class AllContextsCancel implements ContextsListener {
        private final Phaser allClosed = new Phaser(1);
        @Override
        public void onContextCreated(TruffleContext context) {
            // One Phaser party per context; released again in onContextClosed().
            allClosed.register();
            context.closeCancelled(null, "Cancel on debugger disconnect.");
        }
        @Override
        public void onLanguageContextCreated(TruffleContext context, LanguageInfo language) {
        }
        @Override
        public void onLanguageContextInitialized(TruffleContext context, LanguageInfo language) {
        }
        @Override
        public void onLanguageContextFinalized(TruffleContext context, LanguageInfo language) {
        }
        @Override
        public void onLanguageContextDisposed(TruffleContext context, LanguageInfo language) {
        }
        @Override
        public void onContextClosed(TruffleContext context) {
            allClosed.arriveAndDeregister();
        }
        // Blocks the caller until every cancelled context has closed.
        void waitForAllCanceled() {
            allClosed.arriveAndAwaitAdvance();
        }
    }
}
|
oracle/graalpython | 34,674 | graalpython/com.oracle.graal.python.test/src/com/oracle/graal/python/test/compiler/CompilerTests.java | /*
* Copyright (c) 2019, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.graal.python.test.compiler;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.EnumSet;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import com.oracle.graal.python.compiler.CodeUnit;
import com.oracle.graal.python.compiler.CompilationUnit;
import com.oracle.graal.python.compiler.Compiler;
import com.oracle.graal.python.pegparser.FutureFeature;
import com.oracle.graal.python.pegparser.InputType;
import com.oracle.graal.python.pegparser.Parser;
import com.oracle.graal.python.pegparser.ParserCallbacks;
import com.oracle.graal.python.pegparser.sst.ModTy;
import com.oracle.graal.python.pegparser.tokenizer.SourceRange;
import com.oracle.graal.python.runtime.PythonOptions;
import com.oracle.graal.python.test.GraalPythonEnvVars;
import com.oracle.graal.python.test.PythonTests;
public class CompilerTests extends PythonTests {
    public CompilerTests() {
    }
    @Before
    public void beforeTest() {
        // These tests are coupled to the manual bytecode interpreter. They shouldn't run if we're
        // using the Bytecode DSL interpreter.
        Assume.assumeFalse(PythonOptions.ENABLE_BYTECODE_DSL_INTERPRETER);
    }
    // Current test method name; presumably used by doTest to locate the golden
    // bytecode-output file for the test - TODO confirm in doTest.
    @Rule public TestName name = new TestName();
    // --- Golden compilation tests: literals, operators, calls, and argument handling. ---
    // Each test compiles the snippet via doTest (defined later in this file) and compares
    // the produced bytecode against recorded output.
    @Test
    public void testBinaryOp() {
        doTest("1 + 1");
    }
    @Test
    public void testComplexNumber() {
        doTest("-2 + 3j");
    }
    @Test
    public void testMinusFolding() {
        doTest("-1 * -7.0");
    }
    @Test
    public void testAssignment() {
        doTest("a = 12");
    }
    @Test
    public void testAugAssignment() {
        doTest("a += 12.0");
    }
    @Test
    public void testAugAssignmentAttr() {
        doTest("a.b += 12.0");
    }
    @Test
    public void testAugAssignmentSubscr() {
        doTest("a[b] += 12.0");
    }
    @Test
    public void testAnnAssignment() {
        doTest("a: int = 12");
    }
    @Test
    public void testDel() {
        doTest("del a");
    }
    @Test
    public void testGetItem() {
        doTest("a[3]");
    }
    @Test
    public void testSetItem() {
        doTest("a[3] = 1");
    }
    @Test
    public void testDelItem() {
        doTest("del a[3]");
    }
    @Test
    public void testSlice() {
        doTest("a[3:9]");
    }
    @Test
    public void testSliceStep() {
        doTest("a[3:9:2]");
    }
    @Test
    public void testCall() {
        doTest("range(num)");
    }
    @Test
    public void testLogicOperators() {
        doTest("a and b or not c");
    }
    @Test
    public void testManyArgs() {
        // Test collecting more args that a single COLLECT_FROM_STACK instruction can handle
        String source = "print(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36)";
        doTest(source);
    }
    @Test
    public void testCallKeyword() {
        doTest("print('test', end=';')");
    }
    @Test
    public void testCallMultiStarArgs() {
        doTest("foo(1, *a, 2, *b, 3)");
    }
    @Test
    public void testCallMultiStarKwargs() {
        doTest("foo(a=1, **a, b=2, **b, c=3)");
    }
    @Test
    public void testVarArgs() {
        String source = "def foo(*args):\n" +
                        "    print(*args)\n";
        doTest(source);
    }
    @Test
    public void testVarKwargs() {
        String source = "def foo(**kwargs):\n" +
                        "    print(**kwargs)\n";
        doTest(source);
    }
    @Test
    public void testArgsCombination() {
        String source = "def foo(a, /, b, *c, d, **e):\n" +
                        "    print(a, b, c, d, e)\n";
        doTest(source);
    }
    @Test
    public void testArgAnnotations() {
        String source = "def foo(a:1, /, b:2, *c:3, d:4, **e:5):\n" +
                        "    print(a, b, c, d, e)\n";
        doTest(source);
    }
    @Test
    public void testClassAnnotations() {
        String source = "class Foo:\n" +
                        "    attr: a[str]";
        doTest(source);
    }
    @Test
    public void testClassAnnotationsFuture() {
        String source = "from __future__ import annotations\n" +
                        "class Foo:\n" +
                        "    attr: a[str]";
        doTest(source);
    }
    // --- Golden compilation tests: loops, break/continue, and return through
    // with/try/except/finally (exercising the compiler's block-unwinding logic). ---
    @Test
    public void testFor() {
        doTest("for i in [1,2]:\n pass");
    }
    @Test
    public void testWhile() {
        doTest("while False: pass");
    }
    @Test
    public void testForBreakContinue() {
        String source = "for i in range(10):\n" +
                        "    if i == 3:\n" +
                        "        break\n" +
                        "    else:\n" +
                        "        continue\n" +
                        "else:\n" +
                        "    print('else')";
        doTest(source);
    }
    @Test
    public void testWhileBreakContinue() {
        String source = "i = 0\n" +
                        "while i < 10:\n" +
                        "    if i == 3:\n" +
                        "        break\n" +
                        "    else:\n" +
                        "        i += 1\n" +
                        "        continue\n" +
                        "else:\n" +
                        "    print('else')";
        doTest(source);
    }
    @Test
    public void testBreakFromWith() {
        String source = "for i in range(10):\n" +
                        "    with foo() as cm:\n" +
                        "        break\n";
        doTest(source);
    }
    @Test
    public void testBreakFromTry() {
        String source = "for i in range(10):\n" +
                        "    try:\n" +
                        "        break\n" +
                        "    finally:" +
                        "        print('finally')";
        doTest(source);
    }
    @Test
    public void testBreakFromExcept() {
        String source = "for i in range(10):\n" +
                        "    try:\n" +
                        "        1 / 0\n" +
                        "    except RuntimeError as e:" +
                        "        break";
        doTest(source);
    }
    @Test
    public void testBreakFromFinally() {
        String source = "for i in range(10):\n" +
                        "    try:\n" +
                        "        if i:\n" +
                        "            break\n" +
                        "        print(i)\n" +
                        "    finally:\n" +
                        "        print('finally')\n" +
                        "        break";
        doTest(source);
    }
    @Test
    public void testReturnFromWith() {
        String source = "def foo():\n" +
                        "    for i in range(10):\n" +
                        "        with foo() as cm:\n" +
                        "            return a\n";
        doTest(source);
    }
    @Test
    public void testReturnFromTry() {
        String source = "def foo():\n" +
                        "    for i in range(10):\n" +
                        "        try:\n" +
                        "            return a\n" +
                        "        finally:" +
                        "            print('finally')";
        doTest(source);
    }
    @Test
    public void testReturnFromExcept() {
        String source = "def foo():\n" +
                        "    for i in range(10):\n" +
                        "        try:\n" +
                        "            1 / 0\n" +
                        "        except RuntimeError as e:" +
                        "            return a";
        doTest(source);
    }
    @Test
    public void testReturnFromFinally() {
        String source = "def foo():\n" +
                        "    for i in range(10):\n" +
                        "        try:\n" +
                        "            if i:\n" +
                        "                return a\n" +
                        "            print(i)\n" +
                        "        finally:\n" +
                        "            print('finally')\n" +
                        "            return b";
        doTest(source);
    }
    @Test
    public void testFinallyCancelReturn() {
        // A continue in the finally block discards the in-flight return.
        String source = "def foo():\n" +
                        "    for i in range(10):\n" +
                        "        try:\n" +
                        "            return a\n" +
                        "        finally:" +
                        "            continue";
        doTest(source);
    }
    // --- Golden compilation tests: try/except/else/finally combinations and with-statements. ---
    @Test
    public void testTryExcept() {
        String s = "print('before')\n" +
                   "try:\n" +
                   "    print('try')\n" +
                   "except TypeError as e:\n" +
                   "    print('except1')\n" +
                   "except ValueError as e:\n" +
                   "    print('except2')\n" +
                   "print('after')\n";
        doTest(s);
    }
    @Test
    public void testTryExceptBare() {
        String s = "print('before')\n" +
                   "try:\n" +
                   "    print('try')\n" +
                   "except TypeError as e:\n" +
                   "    print('except1')\n" +
                   "except:\n" +
                   "    print('except bare')\n" +
                   "print('after')\n";
        doTest(s);
    }
    @Test
    public void testTryFinally() {
        String s = "print('before')\n" +
                   "try:\n" +
                   "    print('try')\n" +
                   "finally:\n" +
                   "    print('finally')\n" +
                   "print('after')\n";
        doTest(s);
    }
    @Test
    public void testTryFinallyNested() {
        String source = "def foo(obj):\n" +
                        "    for x in obj:\n" +
                        "        print(x)\n" +
                        "        try:\n" +
                        "            try:\n" +
                        "                print('try')\n" +
                        "            finally:\n" +
                        "                print('finally1')\n" +
                        "        finally:\n" +
                        "            print('finally2')\n";
        doTest(source);
    }
    @Test
    public void testTryExceptFinally() {
        String s = "print('before')\n" +
                   "try:\n" +
                   "    print('try')\n" +
                   "except TypeError as e:\n" +
                   "    print('except1')\n" +
                   "except ValueError as e:\n" +
                   "    print('except2')\n" +
                   "finally:\n" +
                   "    print('finally')\n" +
                   "print('after')\n";
        doTest(s);
    }
    @Test
    public void testTryExceptElse() {
        String s = "print('before')\n" +
                   "try:\n" +
                   "    print('try')\n" +
                   "except TypeError as e:\n" +
                   "    print('except1')\n" +
                   "except ValueError as e:\n" +
                   "    print('except2')\n" +
                   "else:\n" +
                   "    print('else')\n" +
                   "print('after')\n";
        doTest(s);
    }
    @Test
    public void testTryExceptElseFinally() {
        String s = "print('before')\n" +
                   "try:\n" +
                   "    print('try')\n" +
                   "except TypeError as e:\n" +
                   "    print('except1')\n" +
                   "except ValueError as e:\n" +
                   "    print('except2')\n" +
                   "else:\n" +
                   "    print('else')\n" +
                   "finally:\n" +
                   "    print('finally')\n" +
                   "print('after')\n";
        doTest(s);
    }
    @Test
    public void testWith() {
        String s = "print('before')\n" +
                   "with open('/dev/null') as f:\n" +
                   "    f.write('foo')\n" +
                   "print('after')";
        doTest(s);
    }
    @Test
    public void testWithMultiple() {
        String s = "print('before')\n" +
                   "with open('/dev/null') as f, open('/tmp/foo'):\n" +
                   "    f.write('foo')\n" +
                   "print('after')";
        doTest(s);
    }
    // --- Golden compilation tests: function/class definitions, closures, and
    // collection literals (including constant-folding of homogeneous tuples). ---
    @Test
    public void testDefun() {
        String source = "def docompute(num, num2=5):\n" +
                        "    return (num, num2)\n";
        doTest(source);
    }
    @Test
    public void testReturnPlain() {
        String source = "def foo():\n" +
                        "    return\n";
        doTest(source);
    }
    @Test
    public void testClosure() {
        String s = "def foo():\n" +
                   "    x = 1\n" +
                   "    def bar():\n" +
                   "        nonlocal x\n" +
                   "        print(x)\n" +
                   "        x = 2\n" +
                   "    bar()\n" +
                   "    print(x)\n" +
                   "    x = 3\n";
        doTest(s);
    }
    @Test
    public void testIf() {
        String source = "if False:\n" +
                        "    print(True)\n" +
                        "else:\n" +
                        "    print(False)\n";
        doTest(source);
    }
    @Test
    public void testIfExpression() {
        doTest("t if cond else f\n");
    }
    @Test
    public void testClass() {
        String source = "class Foo:\n" +
                        "    c = 64\n" +
                        "    def __init__(self, arg):\n" +
                        "        self.var = arg\n";
        doTest(source);
    }
    @Test
    public void testSuper() {
        String source = "class Foo:\n" +
                        "    def boo(self):\n" +
                        "        print('boo')\n" +
                        "class Bar(Foo):\n" +
                        "    def boo(self):\n" +
                        "        super().boo()\n";
        doTest(source);
    }
    @Test
    public void testEmptyList() {
        doTest("[]");
    }
    @Test
    public void testEmptyTuple() {
        doTest("()");
    }
    @Test
    public void testEmptyDict() {
        doTest("{}");
    }
    @Test
    public void testTupleLiteralInts() {
        doTest("(1, 2, 3)");
    }
    @Test
    public void testTupleLiteralDoubles() {
        doTest("(1.0, 2.0, 3.0)");
    }
    @Test
    public void testTupleLiteralBooleans() {
        doTest("(False, True)");
    }
    @Test
    public void testTupleLiteralObjects() {
        doTest("('a', 1, None)");
    }
    @Test
    public void testTupleLiteralMixed() {
        doTest("(1, 2, 3.0)");
    }
    @Test
    public void testTupleLiteralNonConstant() {
        doTest("(1, 2, [3])");
    }
    @Test
    public void testTupleLiteralMixedIntegers() {
        doTest("(1, 17179869184, 3)");
    }
    @Test
    public void testTupleLiteralExpand() {
        doTest("(1, 2, 3, *a, 5)");
    }
    @Test
    public void testListLiteral() {
        doTest("[1, 2, 3]");
    }
    @Test
    public void testListLiteralExpand() {
        doTest("[1, 2, 3, *a, 5]");
    }
    @Test
    public void testSetLiteral() {
        doTest("{1, 2, 3}");
    }
    @Test
    public void testSetLiteralExpand() {
        doTest("{1, 2, 3, *a, 5}");
    }
    @Test
    public void testDictLiteral() {
        doTest("{'a': 'b', 1: 2}");
    }
    @Test
    public void testDictLiteralExpand() {
        doTest("{'a': 'b', 1: 2, **a, None: True}");
    }
    @Test
    public void testUnpack() {
        doTest("a, b = 1, 2");
    }
    @Test
    public void testUnpackEx() {
        doTest("a, *b, c = 1, 2, 3, 4, 5");
    }
@Test
public void testListComprehension() {
    // Comprehension with two `for` clauses and a filter.
    doTest("[str(x) for y in [[1, 2, 3], [4, 5, 6]] for x in y if x < 5]");
}

@Test
public void testFString() {
    // f-string with a conversion (!r) and a format spec (:5).
    doTest("f'before{a}middle{b!r:5}after'");
}

@Test
public void testStringSurrogates() {
    // Astral-plane escape vs its explicit surrogate-pair spelling.
    doTest("'\\U00010400' != '\\uD801\\uDC00'");
}

@Test
public void testNestedListComprehension() {
    doTest("[[x for x in range(5)] for y in range(3)]");
}

@Test
public void testSetComprehension() {
    doTest("{x * 2 for x in range(10) if x % 2 == 0}");
}

@Test
public void testDictComprehension() {
    doTest("{x: str(x) for x in range(10)}");
}

@Test
public void testLambda() {
    // Lambda with a positional parameter and *args.
    doTest("lambda x, *args: args[x]");
}
@Test
public void testYieldPlain() {
    // Bare yield makes the function a generator.
    doTest("def gen(a):\n" +
           "    yield\n");
}

@Test
public void testYieldValue() {
    doTest("def gen(a):\n" +
           "    yield a + 1\n");
}

@Test
public void testYieldFrom() {
    // Delegating generator.
    doTest("def gen(a):\n" +
           "    yield from a\n");
}

@Test
public void testCoroutine() {
    // `await` inside an async function.
    doTest("async def foo(a):\n" +
           "    await a\n");
}

@Test
public void testYieldExpression() {
    // Yield used as an expression whose sent value is assigned.
    doTest("def gen(a):\n" +
           "    b = yield a\n");
}

@Test
public void testGeneratorComprehension() {
    doTest("(str(x) for y in [[1, 2, 3], [4, 5, 6]] for x in y if x < 5)");
}
@Test
public void testExtendedArgs() {
    // 260 statements in one branch push jump offsets past a single byte,
    // exercising extended-argument encoding in the assembler.
    StringBuilder sb = new StringBuilder("if a:\n");
    for (int i = 0; i < 260; i++) {
        sb.append(String.format("    a.f%d('%d')\n", i, i));
    }
    sb.append("else:\n").append("    print('else')");
    doTest(sb.toString());
}
@Test
public void testBenchmark() {
    // Numeric benchmark source exercising nested loops, augmented
    // assignment and a default parameter value.
    doTest("def docompute(num):\n" +
           "    for i in range(num):\n" +
           "        sum_ = 0.0\n" +
           "        j = 0\n" +
           "        while j < num:\n" +
           "            sum_ += 1.0 / (((i + j) * (i + j + 1) >> 1) + i + 1)\n" +
           "            j += 1\n" +
           "\n" +
           "    return sum_\n" +
           "\n" +
           "\n" +
           "def measure(num):\n" +
           "    for run in range(num):\n" +
           "        sum_ = docompute(10000) # 10000\n" +
           "        print('sum', sum_)\n" +
           "\n" +
           "\n" +
           "def __benchmark__(num=5):\n" +
           "    measure(num)\n");
}

@Test
public void testBenchmark2() {
    // Method touching a global and raising; the commented-out lines keep a
    // disabled `nonlocal` variant of the original benchmark source.
    String src = "" +
            "class HandlerTask(Task):\n" +
            "    def __init__(self,i,p,w,s,r):\n" +
            "        global Task\n" +
            "        x = 0\n" +
            "        raise ValueError\n" +
            // "        def f():\n" +
            // "            nonlocal x\n" +
            // "            x = 1\n" +
            "        Task.__init__(self,i,p,w,s,r)\n";
    doTest(src);
}
@Test
public void testImport() {
    // Conditional imports nested inside `if __name__ == '__main__'`.
    doTest("" +
           "if __name__ == '__main__':\n" +
           "    import sys\n" +
           "    if not (len(sys.argv) == 1 and sys.argv[0] == 'java_embedding_bench'):\n" +
           "        import time\n" +
           "        start = time.time()\n" +
           "        if len(sys.argv) >= 2:\n" +
           "            num = int(sys.argv[1])\n" +
           "            __benchmark__(num)\n" +
           "        else:\n" +
           "            __benchmark__()\n" +
           "        print(\"%s took %s s\" % (__file__, time.time() - start))\n");
}

@Test
public void testImportAs() {
    // Dotted import bound under an alias.
    final String src = "import a.b.c as d";
    doTest(src);
}

@Test
public void testImportFrom() {
    final String src = "from math import sqrt, sin as sine";
    doTest(src);
}

@Test
public void testImportStar() {
    final String src = "from a.b import *";
    doTest(src);
}

@Test
public void testEval() {
    // EVAL input compiles a single expression.
    doTest("1", InputType.EVAL);
}

@Test
public void testSingle() {
    // SINGLE input mimics the interactive prompt.
    doTest("1", InputType.SINGLE);
}
@Test
public void testLoadClassDefRef() {
    // Class body reading a free variable of the enclosing function.
    doTest("def f(x): \n" +
           "    class C: y = x\n" +
           "f(1)");
}

@Test
public void testNamedExpr() {
    // Walrus operator (:=) used in an if condition.
    doTest("if x := g():\n print(x)\n");
}
@Test
public void testMatchValueConst() {
    // Literal value pattern against a constant subject.
    doTest("match 1:\n" +
           "    case 1:\n" +
           "        pass\n");
}

@Test
public void testMatchValue() {
    doTest("s = 1\n" +
           "match s:\n" +
           "    case 1:\n" +
           "        pass\n");
}

@Test
public void testMatchValueWithDefault() {
    // Value pattern followed by a wildcard default case.
    doTest("s = 1\n" +
           "match s:\n" +
           "    case 1:\n" +
           "        pass\n" +
           "    case _:\n" +
           "        pass\n");
}

@Test
public void testMatchSingletonBoolean() {
    // Singleton patterns (True/False/None) compare by identity.
    doTest("match 1:\n" +
           "    case True:\n" +
           "        pass\n");
}

@Test
public void testMatchSingletonNone() {
    doTest("match 1:\n" +
           "    case None:\n" +
           "        pass\n");
}

@Test
public void testGuard() {
    // Case guard referencing an outer variable.
    doTest("x = 1\n" +
           "match 1:\n" +
           "    case 1 if x == 1:\n" +
           "        x\n");
}

@Test
public void testMatchAs() {
    // Capture pattern via `as`.
    doTest("match 1:\n" +
           "    case 1 as x:\n" +
           "        x\n");
}

@Test
public void testMatchAs2() {
    doTest("match 1:\n" +
           "    case 1 as x:\n" +
           "        x\n" +
           "    case 2 as y:\n" +
           "        x\n");
}

@Test
public void testMatchAsDefault() {
    doTest("match 1:\n" +
           "    case 1 as x:\n" +
           "        x\n" +
           "    case _:\n" +
           "        x\n");
}

@Test
public void testMatchAsGuard() {
    // Capture combined with a guard on the captured name.
    doTest("match 1:\n" +
           "    case 1 as x if x == 1:\n" +
           "        x\n");
}

@Test
public void testWildcard() {
    doTest("match 1:\n" +
           "    case _:\n" +
           "        pass");
}

@Test
public void testSeq() {
    // Sequence pattern; (1) is a parenthesized int, not a tuple.
    doTest("match (1):\n" +
           "    case [1]:\n" +
           "        pass");
}

@Test
public void testSeqWildcard() {
    doTest("match (1):\n" +
           "    case [_]:\n" +
           "        pass");
}

@Test
public void testSeqWildcardStar() {
    doTest("match (1):\n" +
           "    case [*_]:\n" +
           "        pass");
}

@Test
public void testSeqWildcardSubscript() {
    // Wildcard plus capture inside one sequence pattern.
    doTest("match (1, 2):\n" +
           "    case [_, x]:\n" +
           "        x");
}

@Test
public void testSeqWildcardStarSubscript() {
    doTest("match (1, 2, 3):\n" +
           "    case [*_, y]:\n" +
           "        y");
}

@Test
public void testMatchClass() {
    // Class pattern with a positional capture.
    doTest("match 1:\n" +
           "    case int(x):\n" +
           "        pass");
}

@Test
public void testMatchOr() {
    doTest("match 0:\n" +
           "    case 0 | 1:\n" +
           "        pass");
}

@Test
public void testMatchOrRot() {
    // Or-pattern alternatives binding the same names in different orders.
    doTest("match (0, 1):\n" +
           "    case ((a, b) | (b, a)):\n" +
           "        pass");
}

@Test
public void testMatchMapping() {
    doTest("match {1:1}:\n" +
           "    case {1:1}:\n" +
           "        pass");
}

@Test
public void testMatchMappingSubpattern() {
    // Mapping pattern with a capture as the value subpattern.
    doTest("match {1:1}:\n" +
           "    case {1:x}:\n" +
           "        x");
}

@Test
public void testMatchMappingStar() {
    // Mapping pattern capturing the remaining items with **.
    doTest("match {1:1}:\n" +
           "    case {**z}:\n" +
           "        z");
}
@Test
public void testAssignToDebug() {
    // Any binding of `__debug__` — assignment, unpacking, keyword argument,
    // annotation, augmented assignment, parameter, or match capture — is a
    // syntax error. All cases share the same message, so drive them from a table.
    final String msg = "cannot assign to __debug__";
    final String[] sources = {
            "obj.__debug__ = 1",
            "__debug__ = 1",
            "(a, __debug__, c) = (1, 2, 3)",
            "(a, *__debug__, c) = (1, 2, 3)",
            "f(__debug__=1)",
            "__debug__: int",
            "__debug__ += 1",
            "def f(*, x=lambda __debug__:0): pass",
            "def f(*args:(lambda __debug__:0)): pass",
            "def f(**kwargs:(lambda __debug__:0)): pass",
            "def f(**__debug__): pass",
            "def f(*xx, __debug__): pass",
            "match 1:\n\tcase 1 as __debug__:\n\t\tpass",
    };
    for (String source : sources) {
        checkSyntaxErrorMessage(source, msg);
    }
}

@Test
public void testNoStarredExprHere() {
    // Starred expressions are only valid as targets inside a list/tuple target.
    checkSyntaxErrorMessage("*[1,2,3]", "can't use starred expression here");
    checkSyntaxErrorMessage("*a = range(5)", "starred assignment target must be in a list or tuple");
    checkSyntaxErrorMessage("b = *a", "can't use starred expression here");
}

@Test
public void testRepeatedKwArg() {
    // A duplicated keyword argument is rejected even across * unpacking.
    checkSyntaxErrorMessage("f(p, k1=50, *(1,2), k1=100)", "keyword argument repeated: k1");
}

@Test
public void testYieldOutsideFunction() {
    // `yield`/`yield from` at module level, in a class body, or in an
    // annotation is a syntax error.
    final String yieldMsg = "'yield' outside function";
    checkSyntaxErrorMessage("yield", yieldMsg);
    checkSyntaxErrorMessage("class foo:yield 1", yieldMsg);
    checkSyntaxErrorMessage("class foo:yield from ()", "'yield from' outside function");
    checkSyntaxErrorMessage("def g(a:(yield)): pass", yieldMsg);
    checkSyntaxErrorMessage("yield x", yieldMsg);
    checkSyntaxErrorMessage("class C: yield 1", yieldMsg);
}
@Test
public void testReturnFromAsyncWith() {
    // `return` inside `async with` must run the async exit path first.
    doTest("async def f():\n" +
           "    async with a:\n" +
           "        return");
}

@Test
public void testReturnFromAsyncWithT() {
    // Same, but with two nested async context managers to unwind.
    doTest("async def f():\n" +
           "    async with a:\n" +
           "        async with b:\n" +
           "            return");
}
// Compiles `src` as a FILE input and compares the assembled code unit
// against this test's golden file (see checkCodeUnit).
private void doTest(String src) {
    doTest(src, InputType.FILE);
}

// Same as doTest(String), but with an explicit input type (FILE/EVAL/SINGLE).
private void doTest(String src, InputType type) {
    checkCodeUnit(assemble(src, type));
}

// Asserts that compiling `src` raises a syntax error whose message contains
// `msg`. Fails the test if compilation succeeds; a non-Syntax error type
// also fails via the assertEquals below.
private static void checkSyntaxErrorMessage(String src, String msg) {
    try {
        assemble(src, InputType.FILE);
        fail("Expected SyntaxError: " + msg);
    } catch (SyntaxError e) {
        Assert.assertEquals(ParserCallbacks.ErrorType.Syntax, e.errorType);
        MatcherAssert.assertThat(e.message, CoreMatchers.containsString(msg));
    }
}
// Runs the full pipeline — parse, compile, assemble — on `src` and returns
// the resulting CodeUnit. Uses no compiler flags, optimization level 2, and
// no __future__ features; parser errors surface as SyntaxError via
// TestParserCallbacksImpl.
private static CodeUnit assemble(String src, InputType type) {
    ParserCallbacks parserCallbacks = new TestParserCallbacksImpl();
    Parser parser = Compiler.createParser(src, parserCallbacks, type, false, false);
    ModTy result = (ModTy) parser.parse();
    Compiler compiler = new Compiler(parserCallbacks);
    CompilationUnit cu = compiler.compile(result, EnumSet.noneOf(Compiler.Flags.class), 2, EnumSet.noneOf(FutureFeature.class));
    return cu.assemble();
}
// Compares the printed form of `co` with the golden file for the currently
// running test method (located via the test class name and the JUnit
// TestName rule).
// NOTE(review): if the golden file is missing, it is created from the current
// output and the test passes without comparing anything — a delete-to-
// regenerate workflow; confirm a green first run is intended.
private void checkCodeUnit(CodeUnit co) {
    String coString = co.toString();
    Path goldenFile = Paths.get(GraalPythonEnvVars.graalPythonTestsHome(),
                    "com.oracle.graal.python.test", "testData", "goldenFiles",
                    this.getClass().getSimpleName(),
                    name.getMethodName() + ".co");
    try {
        if (!Files.exists(goldenFile)) {
            Files.createDirectories(goldenFile.getParent());
            Files.writeString(goldenFile, coString);
        } else {
            Assert.assertEquals(Files.readString(goldenFile), coString);
        }
    } catch (IOException ex) {
        fail(ex.getMessage());
    }
}
// ParserCallbacks for tests: turns parser errors into the local SyntaxError
// exception so checkSyntaxErrorMessage can catch and inspect them, and
// treats warnings or incomplete-source reports as test failures.
static class TestParserCallbacksImpl implements ParserCallbacks {
    @Override
    public void safePointPoll() {
        // No-op: there is no interpreter safepoint machinery in unit tests.
    }

    @Override
    public RuntimeException reportIncompleteSource(int line) {
        // Test sources are always complete; reaching this indicates a bug.
        fail("Unexpected call to reportIncompleteSource");
        throw new IllegalStateException("unreachable");
    }

    @Override
    public RuntimeException onError(ErrorType errorType, SourceRange sourceRange, String message) {
        throw new SyntaxError(errorType, message);
    }

    @Override
    public void onWarning(WarningType warningType, SourceRange sourceRange, String message) {
        throw new AssertionError("Unexpected " + warningType + " warning: " + message);
    }
}
// Carrier for parser-reported errors, thrown by TestParserCallbacksImpl
// and caught by checkSyntaxErrorMessage.
private static final class SyntaxError extends RuntimeException {
    private static final long serialVersionUID = 6182610312044069775L;

    // Error category reported by the parser (tests expect ErrorType.Syntax).
    final ParserCallbacks.ErrorType errorType;
    // Raw parser message; asserted against expected substrings.
    final String message;

    SyntaxError(ParserCallbacks.ErrorType errorType, String message) {
        // Fix: forward the message to RuntimeException. Previously the
        // superclass got no message, so getMessage() returned null and an
        // unexpected escape of this exception produced a messageless stack
        // trace. The `message` field is kept for existing callers.
        super(message);
        this.errorType = errorType;
        this.message = message;
    }
}
}
|
googleapis/google-cloud-java | 35,824 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/schema/predict/instance/TextExtractionPredictionInstance.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/schema/predict/instance/text_extraction.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1.schema.predict.instance;
/**
*
*
* <pre>
* Prediction input format for Text Extraction.
* </pre>
*
* Protobuf type {@code
* google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance}
*/
public final class TextExtractionPredictionInstance extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance)
TextExtractionPredictionInstanceOrBuilder {
private static final long serialVersionUID = 0L;
// Use TextExtractionPredictionInstance.newBuilder() to construct.
private TextExtractionPredictionInstance(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TextExtractionPredictionInstance() {
content_ = "";
mimeType_ = "";
key_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TextExtractionPredictionInstance();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstanceProto
.internal_static_google_cloud_aiplatform_v1beta1_schema_predict_instance_TextExtractionPredictionInstance_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstanceProto
.internal_static_google_cloud_aiplatform_v1beta1_schema_predict_instance_TextExtractionPredictionInstance_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance.class,
com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance.Builder.class);
}
public static final int CONTENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object content_ = "";
/**
*
*
* <pre>
* The text snippet to make the predictions on.
* </pre>
*
* <code>string content = 1;</code>
*
* @return The content.
*/
@java.lang.Override
public java.lang.String getContent() {
java.lang.Object ref = content_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
content_ = s;
return s;
}
}
/**
*
*
* <pre>
* The text snippet to make the predictions on.
* </pre>
*
* <code>string content = 1;</code>
*
* @return The bytes for content.
*/
@java.lang.Override
public com.google.protobuf.ByteString getContentBytes() {
java.lang.Object ref = content_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
content_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MIME_TYPE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object mimeType_ = "";
/**
*
*
* <pre>
* The MIME type of the text snippet. The supported MIME types are listed
* below.
* - text/plain
* </pre>
*
* <code>string mime_type = 2;</code>
*
* @return The mimeType.
*/
@java.lang.Override
public java.lang.String getMimeType() {
java.lang.Object ref = mimeType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
mimeType_ = s;
return s;
}
}
/**
*
*
* <pre>
* The MIME type of the text snippet. The supported MIME types are listed
* below.
* - text/plain
* </pre>
*
* <code>string mime_type = 2;</code>
*
* @return The bytes for mimeType.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMimeTypeBytes() {
java.lang.Object ref = mimeType_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
mimeType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int KEY_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object key_ = "";
/**
*
*
* <pre>
* This field is only used for batch prediction. If a key is provided, the
* batch prediction result will by mapped to this key. If omitted, then the
* batch prediction result will contain the entire input instance. Vertex AI
* will not check if keys in the request are duplicates, so it is up to the
* caller to ensure the keys are unique.
* </pre>
*
* <code>string key = 3;</code>
*
* @return The key.
*/
@java.lang.Override
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
key_ = s;
return s;
}
}
/**
*
*
* <pre>
* This field is only used for batch prediction. If a key is provided, the
* batch prediction result will by mapped to this key. If omitted, then the
* batch prediction result will contain the entire input instance. Vertex AI
* will not check if keys in the request are duplicates, so it is up to the
* caller to ensure the keys are unique.
* </pre>
*
* <code>string key = 3;</code>
*
* @return The bytes for key.
*/
@java.lang.Override
public com.google.protobuf.ByteString getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, content_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, mimeType_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, key_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, content_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mimeType_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, mimeType_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, key_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof
com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance
other =
(com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance)
obj;
if (!getContent().equals(other.getContent())) return false;
if (!getMimeType().equals(other.getMimeType())) return false;
if (!getKey().equals(other.getKey())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CONTENT_FIELD_NUMBER;
hash = (53 * hash) + getContent().hashCode();
hash = (37 * hash) + MIME_TYPE_FIELD_NUMBER;
hash = (53 * hash) + getMimeType().hashCode();
hash = (37 * hash) + KEY_FIELD_NUMBER;
hash = (53 * hash) + getKey().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance
prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Prediction input format for Text Extraction.
* </pre>
*
* Protobuf type {@code
* google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance)
com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstanceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstanceProto
.internal_static_google_cloud_aiplatform_v1beta1_schema_predict_instance_TextExtractionPredictionInstance_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstanceProto
.internal_static_google_cloud_aiplatform_v1beta1_schema_predict_instance_TextExtractionPredictionInstance_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance.class,
com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance.Builder.class);
}
// Construct using
// com.google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
content_ = "";
mimeType_ = "";
key_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstanceProto
.internal_static_google_cloud_aiplatform_v1beta1_schema_predict_instance_TextExtractionPredictionInstance_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
build() {
com.google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance
result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance
buildPartial() {
com.google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance
result =
new com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance
result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.content_ = content_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.mimeType_ = mimeType_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.key_ = key_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance) {
return mergeFrom(
(com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance)
other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance
other) {
if (other
== com.google.cloud.aiplatform.v1beta1.schema.predict.instance
.TextExtractionPredictionInstance.getDefaultInstance()) return this;
if (!other.getContent().isEmpty()) {
content_ = other.content_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getMimeType().isEmpty()) {
mimeType_ = other.mimeType_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getKey().isEmpty()) {
key_ = other.key_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
content_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
mimeType_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
key_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Tracks which fields have been explicitly set (bit 0 = content,
// bit 1 = mime_type, bit 2 = key).
private int bitField0_;

// Holds either a String or a lazily-decoded ByteString.
private java.lang.Object content_ = "";

/**
 * Returns the text snippet to make the predictions on.
 *
 * <p>Proto field: {@code string content = 1;}
 *
 * @return The content.
 */
public java.lang.String getContent() {
  java.lang.Object current = content_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // Decode the serialized bytes once and cache the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) current;
  java.lang.String decoded = bytes.toStringUtf8();
  content_ = decoded;
  return decoded;
}

/**
 * Returns the UTF-8 bytes of the text snippet to make the predictions on.
 *
 * <p>Proto field: {@code string content = 1;}
 *
 * @return The bytes for content.
 */
public com.google.protobuf.ByteString getContentBytes() {
  java.lang.Object current = content_;
  if (current instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) current;
  }
  // Encode the cached String once and keep the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  content_ = encoded;
  return encoded;
}

/**
 * Sets the text snippet to make the predictions on.
 *
 * <p>Proto field: {@code string content = 1;}
 *
 * @param value The content to set.
 * @return This builder for chaining.
 */
public Builder setContent(java.lang.String value) {
  content_ = java.util.Objects.requireNonNull(value);
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 * Resets the content field to its default (empty) value.
 *
 * <p>Proto field: {@code string content = 1;}
 *
 * @return This builder for chaining.
 */
public Builder clearContent() {
  content_ = getDefaultInstance().getContent();
  bitField0_ &= ~0x00000001;
  onChanged();
  return this;
}

/**
 * Sets the content field from raw UTF-8 bytes.
 *
 * <p>Proto field: {@code string content = 1;}
 *
 * @param value The bytes for content to set.
 * @return This builder for chaining.
 */
public Builder setContentBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  content_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Holds either a String or a lazily-decoded ByteString.
private java.lang.Object mimeType_ = "";

/**
 * Returns the MIME type of the text snippet. Supported MIME types:
 * {@code text/plain}.
 *
 * <p>Proto field: {@code string mime_type = 2;}
 *
 * @return The mimeType.
 */
public java.lang.String getMimeType() {
  java.lang.Object current = mimeType_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // Decode the serialized bytes once and cache the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) current;
  java.lang.String decoded = bytes.toStringUtf8();
  mimeType_ = decoded;
  return decoded;
}

/**
 * Returns the UTF-8 bytes of the MIME type of the text snippet.
 *
 * <p>Proto field: {@code string mime_type = 2;}
 *
 * @return The bytes for mimeType.
 */
public com.google.protobuf.ByteString getMimeTypeBytes() {
  java.lang.Object current = mimeType_;
  if (current instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) current;
  }
  // Encode the cached String once and keep the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  mimeType_ = encoded;
  return encoded;
}

/**
 * Sets the MIME type of the text snippet (e.g. {@code text/plain}).
 *
 * <p>Proto field: {@code string mime_type = 2;}
 *
 * @param value The mimeType to set.
 * @return This builder for chaining.
 */
public Builder setMimeType(java.lang.String value) {
  mimeType_ = java.util.Objects.requireNonNull(value);
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Resets the mime_type field to its default (empty) value.
 *
 * <p>Proto field: {@code string mime_type = 2;}
 *
 * @return This builder for chaining.
 */
public Builder clearMimeType() {
  mimeType_ = getDefaultInstance().getMimeType();
  bitField0_ &= ~0x00000002;
  onChanged();
  return this;
}

/**
 * Sets the mime_type field from raw UTF-8 bytes.
 *
 * <p>Proto field: {@code string mime_type = 2;}
 *
 * @param value The bytes for mimeType to set.
 * @return This builder for chaining.
 */
public Builder setMimeTypeBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  mimeType_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Holds either a String or a lazily-decoded ByteString.
private java.lang.Object key_ = "";

/**
 * Returns the batch-prediction key. Only used for batch prediction: if set,
 * the batch prediction result is mapped to this key; if omitted, the result
 * contains the entire input instance. Vertex AI does not deduplicate keys —
 * callers must ensure uniqueness.
 *
 * <p>Proto field: {@code string key = 3;}
 *
 * @return The key.
 */
public java.lang.String getKey() {
  java.lang.Object current = key_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // Decode the serialized bytes once and cache the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) current;
  java.lang.String decoded = bytes.toStringUtf8();
  key_ = decoded;
  return decoded;
}

/**
 * Returns the UTF-8 bytes of the batch-prediction key.
 *
 * <p>Proto field: {@code string key = 3;}
 *
 * @return The bytes for key.
 */
public com.google.protobuf.ByteString getKeyBytes() {
  java.lang.Object current = key_;
  if (current instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) current;
  }
  // Encode the cached String once and keep the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  key_ = encoded;
  return encoded;
}

/**
 * Sets the batch-prediction key. Callers are responsible for key uniqueness.
 *
 * <p>Proto field: {@code string key = 3;}
 *
 * @param value The key to set.
 * @return This builder for chaining.
 */
public Builder setKey(java.lang.String value) {
  key_ = java.util.Objects.requireNonNull(value);
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

/**
 * Resets the key field to its default (empty) value.
 *
 * <p>Proto field: {@code string key = 3;}
 *
 * @return This builder for chaining.
 */
public Builder clearKey() {
  key_ = getDefaultInstance().getKey();
  bitField0_ &= ~0x00000004;
  onChanged();
  return this;
}

/**
 * Sets the key field from raw UTF-8 bytes.
 *
 * <p>Proto field: {@code string key = 3;}
 *
 * @param value The bytes for key to set.
 * @return This builder for chaining.
 */
public Builder setKeyBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  key_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Replaces any previously stored unknown fields wholesale; delegates to
  // the GeneratedMessageV3.Builder implementation.
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Merges into (rather than replaces) the existing unknown-field set.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.schema.predict.instance.TextExtractionPredictionInstance)
// Singleton default instance, created eagerly in a static initializer so that
// getDefaultInstance() never observes a null.
private static final com.google.cloud.aiplatform.v1beta1.schema.predict.instance
        .TextExtractionPredictionInstance
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.aiplatform.v1beta1.schema.predict.instance
          .TextExtractionPredictionInstance();
}

/** Returns the immutable default (all fields unset) instance of this message. */
public static com.google.cloud.aiplatform.v1beta1.schema.predict.instance
        .TextExtractionPredictionInstance
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser singleton; safe to share across threads.
private static final com.google.protobuf.Parser<TextExtractionPredictionInstance> PARSER =
    new com.google.protobuf.AbstractParser<TextExtractionPredictionInstance>() {
      @java.lang.Override
      public TextExtractionPredictionInstance parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially parsed message so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** Returns the shared parser for this message type. */
public static com.google.protobuf.Parser<TextExtractionPredictionInstance> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<TextExtractionPredictionInstance> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.schema.predict.instance
        .TextExtractionPredictionInstance
    getDefaultInstanceForType() {
  // The default instance doubles as the canonical prototype for this type.
  return DEFAULT_INSTANCE;
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.notebooks.v1.model;
/**
* The definition of a notebook instance.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Notebooks API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Instance extends com.google.api.client.json.GenericJson {
/**
* The hardware accelerator used on this instance. If you use accelerators, make sure that your
* configuration has [enough vCPUs and memory to support the `machine_type` you have
* selected](https://cloud.google.com/compute/docs/gpus/#gpus-list).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AcceleratorConfig acceleratorConfig;
/**
* Input only. The size of the boot disk in GB attached to this instance, up to a maximum of 64000
* GB (64 TB). The minimum recommended value is 100 GB. If not specified, this defaults to 100.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long bootDiskSizeGb;
/**
* Input only. The type of the boot disk attached to this instance, defaults to standard
* persistent disk (`PD_STANDARD`).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String bootDiskType;
/**
* Optional. Flag to enable ip forwarding or not, default false/off.
* https://cloud.google.com/vpc/docs/using-routes#canipforward
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canIpForward;
/**
* Use a container image to start the notebook instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ContainerImage containerImage;
/**
* Output only. Instance creation time.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String createTime;
/**
* Output only. Email address of entity that sent original CreateInstance request.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creator;
/**
* Specify a custom Cloud Storage path where the GPU driver is stored. If not specified, we'll
* automatically choose from official GPU drivers.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String customGpuDriverPath;
/**
* Input only. The size of the data disk in GB attached to this instance, up to a maximum of 64000
* GB (64 TB). You can choose the size of the data disk based on how big your notebooks and data
* are. If not specified, this defaults to 100.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long dataDiskSizeGb;
/**
* Input only. The type of the data disk attached to this instance, defaults to standard
* persistent disk (`PD_STANDARD`).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String dataDiskType;
/**
* Input only. Disk encryption method used on the boot and data disks, defaults to GMEK.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String diskEncryption;
/**
* Output only. Attached disks to notebook instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Disk> disks;
static {
// hack to force ProGuard to consider Disk used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Disk.class);
}
/**
* Whether the end user authorizes Google Cloud to install GPU driver on this instance. If this
* field is empty or set to false, the GPU driver won't be installed. Only applicable to instances
* with GPUs.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean installGpuDriver;
/**
* Output only. Checks how feasible a migration from UmN to WbI is.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InstanceMigrationEligibility instanceMigrationEligibility;
/**
* Input only. The owner of this instance after creation. Format: `alias@example.com` Currently
* supports one owner only. If not specified, all of the service account users of your VM
* instance's service account can use the instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> instanceOwners;
/**
* Input only. The KMS key used to encrypt the disks, only applicable if disk_encryption is CMEK.
* Format: `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}`
* Learn more about [using your own encryption keys](/kms/docs/quickstart).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kmsKey;
/**
* Labels to apply to this instance. These can be later modified by the setLabels method.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Required. The [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-
* resource) of this instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String machineType;
/**
* Custom metadata to apply to this instance. For example, to specify a Cloud Storage bucket for
* automatic backup, you can use the `gcs-data-bucket` metadata tag. Format: `"--metadata=gcs-
* data-bucket=BUCKET"`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> metadata;
/**
* Output only. Bool indicating whether this notebook has been migrated to a Workbench Instance
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean migrated;
/**
* Output only. The name of this notebook instance. Format:
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The name of the VPC that this instance is in. Format:
* `projects/{project_id}/global/networks/{network_id}`
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String network;
/**
* Optional. The type of vNIC to be used on this interface. This may be gVNIC or VirtioNet.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String nicType;
/**
* If true, the notebook instance will not register with the proxy.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean noProxyAccess;
/**
* If true, no external IP will be assigned to this instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean noPublicIp;
/**
* Input only. If true, the data disk will not be auto deleted when deleting the instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean noRemoveDataDisk;
/**
* Path to a Bash script that automatically runs after a notebook instance fully boots up. The
* path must be a URL or Cloud Storage path (`gs://path-to-file/file-name`).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String postStartupScript;
/**
* Output only. The proxy endpoint that is used to access the Jupyter notebook.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String proxyUri;
/**
* Optional. The optional reservation affinity. Setting this field will apply the specified [Zonal
* Compute Reservation](https://cloud.google.com/compute/docs/instances/reserving-zonal-resources)
* to this notebook instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ReservationAffinity reservationAffinity;
/**
* The service account on this instance, giving access to other Google Cloud services. You can use
* any service account within the same project, but you must have the service account user
* permission to use the instance. If not specified, the [Compute Engine default service
* account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account)
* is used.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String serviceAccount;
/**
* Optional. The URIs of service account scopes to be included in Compute Engine instances. If not
* specified, the following [scopes](https://cloud.google.com/compute/docs/access/service-
* accounts#accesscopesiam) are defined: - https://www.googleapis.com/auth/cloud-platform -
* https://www.googleapis.com/auth/userinfo.email If not using default scopes, you need at least:
* https://www.googleapis.com/auth/compute
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> serviceAccountScopes;
/**
* Optional. Shielded VM configuration. [Images using supported Shielded VM
* features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ShieldedInstanceConfig shieldedInstanceConfig;
/**
* Output only. The state of this instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String state;
/**
* The name of the subnet that this instance is in. Format:
* `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String subnet;
/**
* Optional. The Compute Engine network tags to add to runtime (see [Add network
* tags](https://cloud.google.com/vpc/docs/add-remove-network-tags)).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> tags;
/**
* Output only. Instance update time.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String updateTime;
/**
* The upgrade history of this instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<UpgradeHistoryEntry> upgradeHistory;
/**
* Use a Compute Engine VM image to start the notebook instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private VmImage vmImage;
/**
 * Returns the hardware accelerator configuration used on this instance, if any. When using
 * accelerators, the selected machine type must have enough vCPUs and memory to support them
 * (https://cloud.google.com/compute/docs/gpus/#gpus-list).
 *
 * @return value or {@code null} for none
 */
public AcceleratorConfig getAcceleratorConfig() {
  return this.acceleratorConfig;
}

/**
 * Sets the hardware accelerator configuration used on this instance.
 *
 * @param acceleratorConfig acceleratorConfig or {@code null} for none
 */
public Instance setAcceleratorConfig(AcceleratorConfig acceleratorConfig) {
  this.acceleratorConfig = acceleratorConfig;
  return this;
}
/**
 * Returns the boot disk size in GB. Input only; up to 64000 GB (64 TB), recommended minimum
 * 100 GB, defaults to 100 when unspecified.
 *
 * @return value or {@code null} for none
 */
public java.lang.Long getBootDiskSizeGb() {
  return this.bootDiskSizeGb;
}

/**
 * Sets the boot disk size in GB. Input only.
 *
 * @param bootDiskSizeGb bootDiskSizeGb or {@code null} for none
 */
public Instance setBootDiskSizeGb(java.lang.Long bootDiskSizeGb) {
  this.bootDiskSizeGb = bootDiskSizeGb;
  return this;
}
/**
 * Returns the boot disk type. Input only; defaults to standard persistent disk
 * ({@code PD_STANDARD}).
 *
 * @return value or {@code null} for none
 */
public java.lang.String getBootDiskType() {
  return this.bootDiskType;
}

/**
 * Sets the boot disk type. Input only.
 *
 * @param bootDiskType bootDiskType or {@code null} for none
 */
public Instance setBootDiskType(java.lang.String bootDiskType) {
  this.bootDiskType = bootDiskType;
  return this;
}
/**
 * Returns whether IP forwarding is enabled. Optional; defaults to false/off.
 * See https://cloud.google.com/vpc/docs/using-routes#canipforward
 *
 * @return value or {@code null} for none
 */
public java.lang.Boolean getCanIpForward() {
  return this.canIpForward;
}

/**
 * Sets whether IP forwarding is enabled. Optional.
 *
 * @param canIpForward canIpForward or {@code null} for none
 */
public Instance setCanIpForward(java.lang.Boolean canIpForward) {
  this.canIpForward = canIpForward;
  return this;
}
/**
 * Returns the container image used to start the notebook instance.
 *
 * @return value or {@code null} for none
 */
public ContainerImage getContainerImage() {
  return this.containerImage;
}

/**
 * Sets the container image used to start the notebook instance.
 *
 * @param containerImage containerImage or {@code null} for none
 */
public Instance setContainerImage(ContainerImage containerImage) {
  this.containerImage = containerImage;
  return this;
}
/**
 * Returns the instance creation time. Output only.
 *
 * @return value or {@code null} for none
 */
public String getCreateTime() {
  return this.createTime;
}

/**
 * Sets the instance creation time. Output only.
 *
 * @param createTime createTime or {@code null} for none
 */
public Instance setCreateTime(String createTime) {
  this.createTime = createTime;
  return this;
}
/**
 * Returns the email address of the entity that sent the original CreateInstance request.
 * Output only.
 *
 * @return value or {@code null} for none
 */
public java.lang.String getCreator() {
  return this.creator;
}

/**
 * Sets the creator email address. Output only.
 *
 * @param creator creator or {@code null} for none
 */
public Instance setCreator(java.lang.String creator) {
  this.creator = creator;
  return this;
}
/**
 * Returns the custom Cloud Storage path where the GPU driver is stored. When unspecified, an
 * official GPU driver is chosen automatically.
 *
 * @return value or {@code null} for none
 */
public java.lang.String getCustomGpuDriverPath() {
  return this.customGpuDriverPath;
}

/**
 * Sets the custom Cloud Storage path where the GPU driver is stored.
 *
 * @param customGpuDriverPath customGpuDriverPath or {@code null} for none
 */
public Instance setCustomGpuDriverPath(java.lang.String customGpuDriverPath) {
  this.customGpuDriverPath = customGpuDriverPath;
  return this;
}
/**
 * Returns the data disk size in GB. Input only; up to 64000 GB (64 TB), sized to fit your
 * notebooks and data, defaults to 100 when unspecified.
 *
 * @return value or {@code null} for none
 */
public java.lang.Long getDataDiskSizeGb() {
  return this.dataDiskSizeGb;
}

/**
 * Sets the data disk size in GB. Input only.
 *
 * @param dataDiskSizeGb dataDiskSizeGb or {@code null} for none
 */
public Instance setDataDiskSizeGb(java.lang.Long dataDiskSizeGb) {
  this.dataDiskSizeGb = dataDiskSizeGb;
  return this;
}
/**
 * Returns the data disk type. Input only; defaults to standard persistent disk
 * ({@code PD_STANDARD}).
 *
 * @return value or {@code null} for none
 */
public java.lang.String getDataDiskType() {
  return this.dataDiskType;
}

/**
 * Sets the data disk type. Input only.
 *
 * @param dataDiskType dataDiskType or {@code null} for none
 */
public Instance setDataDiskType(java.lang.String dataDiskType) {
  this.dataDiskType = dataDiskType;
  return this;
}
/**
 * Returns the disk encryption method used on the boot and data disks. Input only; defaults
 * to GMEK.
 *
 * @return value or {@code null} for none
 */
public java.lang.String getDiskEncryption() {
  return this.diskEncryption;
}

/**
 * Sets the disk encryption method. Input only.
 *
 * @param diskEncryption diskEncryption or {@code null} for none
 */
public Instance setDiskEncryption(java.lang.String diskEncryption) {
  this.diskEncryption = diskEncryption;
  return this;
}
/**
 * Returns the disks attached to this notebook instance. Output only.
 *
 * @return value or {@code null} for none
 */
public java.util.List<Disk> getDisks() {
  return this.disks;
}

/**
 * Sets the disks attached to this notebook instance. Output only.
 *
 * @param disks disks or {@code null} for none
 */
public Instance setDisks(java.util.List<Disk> disks) {
  this.disks = disks;
  return this;
}
/**
 * Returns whether the end user authorizes Google Cloud to install a GPU driver on this
 * instance. If empty or false the driver is not installed. Only applicable to instances
 * with GPUs.
 *
 * @return value or {@code null} for none
 */
public java.lang.Boolean getInstallGpuDriver() {
  return this.installGpuDriver;
}

/**
 * Sets whether Google Cloud may install a GPU driver on this instance.
 *
 * @param installGpuDriver installGpuDriver or {@code null} for none
 */
public Instance setInstallGpuDriver(java.lang.Boolean installGpuDriver) {
  this.installGpuDriver = installGpuDriver;
  return this;
}
/**
 * Returns the feasibility assessment of a migration from UmN to WbI. Output only.
 *
 * @return value or {@code null} for none
 */
public InstanceMigrationEligibility getInstanceMigrationEligibility() {
  return this.instanceMigrationEligibility;
}

/**
 * Sets the UmN-to-WbI migration feasibility assessment. Output only.
 *
 * @param instanceMigrationEligibility instanceMigrationEligibility or {@code null} for none
 */
public Instance setInstanceMigrationEligibility(InstanceMigrationEligibility instanceMigrationEligibility) {
  this.instanceMigrationEligibility = instanceMigrationEligibility;
  return this;
}
/**
 * Returns the owners of this instance after creation (format: {@code alias@example.com}).
 * Input only; currently supports a single owner. If unspecified, all service account users of
 * the VM's service account can use the instance.
 *
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getInstanceOwners() {
  return this.instanceOwners;
}

/**
 * Sets the owners of this instance after creation. Input only.
 *
 * @param instanceOwners instanceOwners or {@code null} for none
 */
public Instance setInstanceOwners(java.util.List<java.lang.String> instanceOwners) {
  this.instanceOwners = instanceOwners;
  return this;
}
/**
 * Returns the KMS key used to encrypt the disks; only applicable when disk_encryption is CMEK.
 * Format:
 * {@code projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}}.
 * Input only. See [using your own encryption keys](/kms/docs/quickstart).
 *
 * @return value or {@code null} for none
 */
public java.lang.String getKmsKey() {
  return this.kmsKey;
}

/**
 * Sets the KMS key used to encrypt the disks. Input only.
 *
 * @param kmsKey kmsKey or {@code null} for none
 */
public Instance setKmsKey(java.lang.String kmsKey) {
  this.kmsKey = kmsKey;
  return this;
}
/**
 * Returns the labels applied to this instance; modifiable later via the setLabels method.
 *
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getLabels() {
  return this.labels;
}

/**
 * Sets the labels applied to this instance.
 *
 * @param labels labels or {@code null} for none
 */
public Instance setLabels(java.util.Map<String, java.lang.String> labels) {
  this.labels = labels;
  return this;
}
/**
 * Returns the [Compute Engine machine
 * type](https://cloud.google.com/compute/docs/machine-resource) of this instance. Required.
 *
 * @return value or {@code null} for none
 */
public java.lang.String getMachineType() {
  return this.machineType;
}

/**
 * Sets the Compute Engine machine type of this instance. Required.
 *
 * @param machineType machineType or {@code null} for none
 */
public Instance setMachineType(java.lang.String machineType) {
  this.machineType = machineType;
  return this;
}
/**
 * Returns the custom metadata applied to this instance. For example, a Cloud Storage bucket
 * for automatic backup can be specified via the {@code gcs-data-bucket} metadata tag, format:
 * {@code "--metadata=gcs-data-bucket=BUCKET"}.
 *
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getMetadata() {
  return this.metadata;
}

/**
 * Sets the custom metadata applied to this instance.
 *
 * @param metadata metadata or {@code null} for none
 */
public Instance setMetadata(java.util.Map<String, java.lang.String> metadata) {
  this.metadata = metadata;
  return this;
}
/**
* Output only. Bool indicating whether this notebook has been migrated to a Workbench Instance
* @return value or {@code null} for none
*/
public java.lang.Boolean getMigrated() {
return migrated;
}
/**
* Output only. Bool indicating whether this notebook has been migrated to a Workbench Instance
* @param migrated migrated or {@code null} for none
*/
public Instance setMigrated(java.lang.Boolean migrated) {
this.migrated = migrated;
return this;
}
/**
* Output only. The name of this notebook instance. Format:
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Output only. The name of this notebook instance. Format:
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* @param name name or {@code null} for none
*/
public Instance setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The name of the VPC that this instance is in. Format:
* `projects/{project_id}/global/networks/{network_id}`
* @return value or {@code null} for none
*/
public java.lang.String getNetwork() {
return network;
}
/**
* The name of the VPC that this instance is in. Format:
* `projects/{project_id}/global/networks/{network_id}`
* @param network network or {@code null} for none
*/
public Instance setNetwork(java.lang.String network) {
this.network = network;
return this;
}
/**
* Optional. The type of vNIC to be used on this interface. This may be gVNIC or VirtioNet.
* @return value or {@code null} for none
*/
public java.lang.String getNicType() {
return nicType;
}
/**
* Optional. The type of vNIC to be used on this interface. This may be gVNIC or VirtioNet.
* @param nicType nicType or {@code null} for none
*/
public Instance setNicType(java.lang.String nicType) {
this.nicType = nicType;
return this;
}
/**
* If true, the notebook instance will not register with the proxy.
* @return value or {@code null} for none
*/
public java.lang.Boolean getNoProxyAccess() {
return noProxyAccess;
}
/**
* If true, the notebook instance will not register with the proxy.
* @param noProxyAccess noProxyAccess or {@code null} for none
*/
public Instance setNoProxyAccess(java.lang.Boolean noProxyAccess) {
this.noProxyAccess = noProxyAccess;
return this;
}
/**
* If true, no external IP will be assigned to this instance.
* @return value or {@code null} for none
*/
public java.lang.Boolean getNoPublicIp() {
return noPublicIp;
}
/**
* If true, no external IP will be assigned to this instance.
* @param noPublicIp noPublicIp or {@code null} for none
*/
public Instance setNoPublicIp(java.lang.Boolean noPublicIp) {
this.noPublicIp = noPublicIp;
return this;
}
/**
* Input only. If true, the data disk will not be auto deleted when deleting the instance.
* @return value or {@code null} for none
*/
public java.lang.Boolean getNoRemoveDataDisk() {
return noRemoveDataDisk;
}
/**
* Input only. If true, the data disk will not be auto deleted when deleting the instance.
* @param noRemoveDataDisk noRemoveDataDisk or {@code null} for none
*/
public Instance setNoRemoveDataDisk(java.lang.Boolean noRemoveDataDisk) {
this.noRemoveDataDisk = noRemoveDataDisk;
return this;
}
/**
* Path to a Bash script that automatically runs after a notebook instance fully boots up. The
* path must be a URL or Cloud Storage path (`gs://path-to-file/file-name`).
* @return value or {@code null} for none
*/
public java.lang.String getPostStartupScript() {
return postStartupScript;
}
/**
* Path to a Bash script that automatically runs after a notebook instance fully boots up. The
* path must be a URL or Cloud Storage path (`gs://path-to-file/file-name`).
* @param postStartupScript postStartupScript or {@code null} for none
*/
public Instance setPostStartupScript(java.lang.String postStartupScript) {
this.postStartupScript = postStartupScript;
return this;
}
/**
* Output only. The proxy endpoint that is used to access the Jupyter notebook.
* @return value or {@code null} for none
*/
public java.lang.String getProxyUri() {
return proxyUri;
}
/**
* Output only. The proxy endpoint that is used to access the Jupyter notebook.
* @param proxyUri proxyUri or {@code null} for none
*/
public Instance setProxyUri(java.lang.String proxyUri) {
this.proxyUri = proxyUri;
return this;
}
/**
* Optional. The optional reservation affinity. Setting this field will apply the specified [Zonal
* Compute Reservation](https://cloud.google.com/compute/docs/instances/reserving-zonal-resources)
* to this notebook instance.
* @return value or {@code null} for none
*/
public ReservationAffinity getReservationAffinity() {
return reservationAffinity;
}
/**
* Optional. The optional reservation affinity. Setting this field will apply the specified [Zonal
* Compute Reservation](https://cloud.google.com/compute/docs/instances/reserving-zonal-resources)
* to this notebook instance.
* @param reservationAffinity reservationAffinity or {@code null} for none
*/
public Instance setReservationAffinity(ReservationAffinity reservationAffinity) {
this.reservationAffinity = reservationAffinity;
return this;
}
/**
* The service account on this instance, giving access to other Google Cloud services. You can use
* any service account within the same project, but you must have the service account user
* permission to use the instance. If not specified, the [Compute Engine default service
* account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account)
* is used.
* @return value or {@code null} for none
*/
public java.lang.String getServiceAccount() {
return serviceAccount;
}
/**
* The service account on this instance, giving access to other Google Cloud services. You can use
* any service account within the same project, but you must have the service account user
* permission to use the instance. If not specified, the [Compute Engine default service
* account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account)
* is used.
* @param serviceAccount serviceAccount or {@code null} for none
*/
public Instance setServiceAccount(java.lang.String serviceAccount) {
this.serviceAccount = serviceAccount;
return this;
}
/**
* Optional. The URIs of service account scopes to be included in Compute Engine instances. If not
* specified, the following [scopes](https://cloud.google.com/compute/docs/access/service-
* accounts#accesscopesiam) are defined: - https://www.googleapis.com/auth/cloud-platform -
* https://www.googleapis.com/auth/userinfo.email If not using default scopes, you need at least:
* https://www.googleapis.com/auth/compute
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getServiceAccountScopes() {
return serviceAccountScopes;
}
/**
* Optional. The URIs of service account scopes to be included in Compute Engine instances. If not
* specified, the following [scopes](https://cloud.google.com/compute/docs/access/service-
* accounts#accesscopesiam) are defined: - https://www.googleapis.com/auth/cloud-platform -
* https://www.googleapis.com/auth/userinfo.email If not using default scopes, you need at least:
* https://www.googleapis.com/auth/compute
* @param serviceAccountScopes serviceAccountScopes or {@code null} for none
*/
public Instance setServiceAccountScopes(java.util.List<java.lang.String> serviceAccountScopes) {
this.serviceAccountScopes = serviceAccountScopes;
return this;
}
/**
* Optional. Shielded VM configuration. [Images using supported Shielded VM
* features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm).
* @return value or {@code null} for none
*/
public ShieldedInstanceConfig getShieldedInstanceConfig() {
return shieldedInstanceConfig;
}
/**
* Optional. Shielded VM configuration. [Images using supported Shielded VM
* features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm).
* @param shieldedInstanceConfig shieldedInstanceConfig or {@code null} for none
*/
public Instance setShieldedInstanceConfig(ShieldedInstanceConfig shieldedInstanceConfig) {
this.shieldedInstanceConfig = shieldedInstanceConfig;
return this;
}
/**
* Output only. The state of this instance.
* @return value or {@code null} for none
*/
public java.lang.String getState() {
return state;
}
/**
* Output only. The state of this instance.
* @param state state or {@code null} for none
*/
public Instance setState(java.lang.String state) {
this.state = state;
return this;
}
/**
* The name of the subnet that this instance is in. Format:
* `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
* @return value or {@code null} for none
*/
public java.lang.String getSubnet() {
return subnet;
}
/**
* The name of the subnet that this instance is in. Format:
* `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
* @param subnet subnet or {@code null} for none
*/
public Instance setSubnet(java.lang.String subnet) {
this.subnet = subnet;
return this;
}
/**
* Optional. The Compute Engine network tags to add to runtime (see [Add network
* tags](https://cloud.google.com/vpc/docs/add-remove-network-tags)).
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getTags() {
return tags;
}
/**
* Optional. The Compute Engine network tags to add to runtime (see [Add network
* tags](https://cloud.google.com/vpc/docs/add-remove-network-tags)).
* @param tags tags or {@code null} for none
*/
public Instance setTags(java.util.List<java.lang.String> tags) {
this.tags = tags;
return this;
}
/**
 * Output only. Instance update time.
 * @return value or {@code null} for none
 */
public java.lang.String getUpdateTime() {
    return updateTime;
}
/**
 * Output only. Instance update time.
 * @param updateTime updateTime or {@code null} for none
 */
public Instance setUpdateTime(java.lang.String updateTime) {
    this.updateTime = updateTime;
    return this;
}
/**
* The upgrade history of this instance.
* @return value or {@code null} for none
*/
public java.util.List<UpgradeHistoryEntry> getUpgradeHistory() {
return upgradeHistory;
}
/**
* The upgrade history of this instance.
* @param upgradeHistory upgradeHistory or {@code null} for none
*/
public Instance setUpgradeHistory(java.util.List<UpgradeHistoryEntry> upgradeHistory) {
this.upgradeHistory = upgradeHistory;
return this;
}
/**
* Use a Compute Engine VM image to start the notebook instance.
* @return value or {@code null} for none
*/
public VmImage getVmImage() {
return vmImage;
}
/**
* Use a Compute Engine VM image to start the notebook instance.
* @param vmImage vmImage or {@code null} for none
*/
public Instance setVmImage(VmImage vmImage) {
this.vmImage = vmImage;
return this;
}
// Generated override that narrows the return type of the generic field setter to Instance.
@Override
public Instance set(String fieldName, Object value) {
return (Instance) super.set(fieldName, value);
}
// Generated override that narrows the return type of clone() to Instance.
@Override
public Instance clone() {
return (Instance) super.clone();
}
}
|
apache/kylin | 36,056 | src/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.job.execution;
import static org.apache.kylin.job.execution.JobTypeEnum.Category.INTERNAL;
import static org.apache.kylin.job.execution.JobTypeEnum.Category.OTHER;
import static org.apache.kylin.job.execution.JobTypeEnum.Category.SNAPSHOT;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.mail.MailNotificationType;
import org.apache.kylin.common.mail.MailNotifier;
import org.apache.kylin.common.metrics.MetricsCategory;
import org.apache.kylin.common.metrics.MetricsGroup;
import org.apache.kylin.common.metrics.MetricsName;
import org.apache.kylin.common.util.RandomUtil;
import org.apache.kylin.common.util.StringHelper;
import org.apache.kylin.common.util.ThrowableUtils;
import org.apache.kylin.guava30.shaded.common.annotations.VisibleForTesting;
import org.apache.kylin.guava30.shaded.common.base.MoreObjects;
import org.apache.kylin.guava30.shaded.common.base.Preconditions;
import org.apache.kylin.guava30.shaded.common.base.Throwables;
import org.apache.kylin.guava30.shaded.common.collect.Lists;
import org.apache.kylin.guava30.shaded.common.collect.Maps;
import org.apache.kylin.guava30.shaded.common.collect.Sets;
import org.apache.kylin.job.JobContext;
import org.apache.kylin.job.core.AbstractJobExecutable;
import org.apache.kylin.job.dao.ExecutableOutputPO;
import org.apache.kylin.job.dao.ExecutablePO;
import org.apache.kylin.job.exception.ExecuteException;
import org.apache.kylin.job.exception.JobStoppedException;
import org.apache.kylin.job.exception.JobStoppedNonVoluntarilyException;
import org.apache.kylin.job.mail.JobMailUtil;
import org.apache.kylin.job.util.JobContextUtil;
import org.apache.kylin.metadata.cube.model.NBatchConstants;
import org.apache.kylin.metadata.cube.model.NDataLayout;
import org.apache.kylin.metadata.model.NDataModel;
import org.apache.kylin.metadata.model.NDataModelManager;
import org.apache.kylin.metadata.project.NProjectManager;
import org.apache.kylin.metadata.project.ProjectInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.Getter;
import lombok.Setter;
import lombok.val;
import lombok.var;
import lombok.experimental.Delegate;
/**
*/
public abstract class AbstractExecutable extends AbstractJobExecutable implements Executable {
/** A unit of work that may throw; executed under {@link #wrapWithCheckQuit(Callback)}. */
public interface Callback {
void process() throws Exception;
}
protected static final String SUBMITTER = "submitter";
protected static final String PARENT_ID = "parentId";
private static final Integer DEFAULT_DRIVER_MEMORY = 512;
public static final String RUNTIME_INFO = "runtimeInfo";
public static final String DEPENDENT_FILES = "dependentFiles";
protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class);
protected int retry = 0;
@Getter
@Setter
private String name;
@Getter
@Setter
private JobTypeEnum jobType;
@Getter
@Setter
private String logPath;
@Setter
@Getter
// uuid of the model or table identity if table sampling
// or internal table
private String targetSubject;
@Setter
@Getter
private List<String> targetSegments = Lists.newArrayList();//uuid of related segments
@Getter
@Setter
private String id;
@Getter
@Setter
private boolean resumable = false;
@Delegate
private ExecutableParams executableParams = new ExecutableParams();
protected String project;
protected JobContext context;
@Getter
@Setter
private Map<String, Object> runTimeInfo = Maps.newHashMap();
@Setter
@Getter
private Set<Long> targetPartitions = Sets.newHashSet();
// A "bucket job" is one that targets specific partitions (targetPartitions is non-empty).
public boolean isBucketJob() {
return CollectionUtils.isNotEmpty(targetPartitions);
}
@Getter
@Setter
private int priority = ExecutablePO.DEFAULT_PRIORITY;
@Getter
@Setter
private Object tag;
@Getter
@Setter
private int stepId = -1;
@Getter
@Setter
private ExecutablePO po;
@Getter
@Setter
private JobSchedulerModeEnum jobSchedulerMode = JobSchedulerModeEnum.CHAIN;
@Getter
@Setter
private String previousStep;
@Getter
@Setter
private Set<String> nextSteps = Sets.newHashSet();
/**
 * Resolves the alias of the model this job targets.
 *
 * @return the model alias (fusion alias for healthy models, raw alias for broken
 *         ones), or {@code null} when the target model no longer exists
 */
public String getTargetModelAlias() {
    // Reuse a single manager instance; the original created it twice.
    val modelManager = NDataModelManager.getInstance(getConfig(), getProject());
    NDataModel dataModelDesc = modelManager.getDataModelDesc(targetSubject);
    if (dataModelDesc == null) {
        return null;
    }
    // A broken model cannot be fully initialized, so read its alias from the raw descriptor.
    return modelManager.isModelBroken(targetSubject)
            ? modelManager.getDataModelDescWithoutInit(targetSubject).getAlias()
            : dataModelDesc.getFusionModelAlias();
}
/** @return the id of this job's target model, or {@code null} if it no longer exists */
public String getTargetModelId() {
return getTargetModelId(getProject(), targetSubject);
}
/**
 * Looks up the id of the given model in the given project.
 *
 * @param project       project the model belongs to
 * @param targetSubject model uuid to look up
 * @return the model id, or {@code null} when the model does not exist; broken
 *         models are resolved through the uninitialized descriptor
 */
public static String getTargetModelId(String project, String targetSubject) {
    val modelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
    NDataModel model = modelManager.getDataModelDesc(targetSubject);
    if (model == null) {
        return null;
    }
    if (modelManager.isModelBroken(targetSubject)) {
        return modelManager.getDataModelDescWithoutInit(targetSubject).getId();
    }
    return model.getId();
}
/** Alias of the target subject; for model jobs this delegates to the model alias. */
public String getTargetSubjectAlias() {
return getTargetModelAlias();
}
/** Creates an executable with a freshly generated random UUID as its id. */
public AbstractExecutable() {
setId(RandomUtil.randomUUIDStr());
}
/** Creates an executable without assigning an id; the marker argument is unused. */
public AbstractExecutable(Object notSetId) {
}
/** Cancellation hook; no-op by default, subclasses may override to stop external work. */
public void cancelJob() {
}
/**
 * Judge whether it is safe to discard this job, i.e. whether discarding it will
 * NOT cause segment holes.
 * @return true by default
 */
public boolean safetyIfDiscard() {
return true;
}
/** @return the current KylinConfig taken from the environment */
protected KylinConfig getConfig() {
return KylinConfig.getInstanceFromEnv();
}
/** @return the ExecutableManager of this job's project */
protected ExecutableManager getManager() {
return getExecutableManager(project);
}
/**
 * Runs the given callback inside a transaction with retry, aborting via
 * {@link JobStoppedException} when the job has been stopped externally.
 * Job steps use this to update their status: the quit check runs both before
 * and inside the transaction so that a concurrent user action (pause, discard,
 * restart) is never overwritten by a stale worker thread.
 */
protected void wrapWithCheckQuit(Callback f) throws JobStoppedException {
boolean tryAgain = true;
while (tryAgain) {
// first check outside the tx, applying the state change if we must abort
checkNeedQuit(true);
// in this short period user might changed job state, say restart
// if a worker thread is unaware of this, it may go ahead and register step 1 as succeed here.
// However the user expects a total RESTART
tryAgain = false;
try {
JobContextUtil.withTxAndRetry(() -> {
// re-check inside the tx (without applying changes) before doing the work
checkNeedQuit(false);
f.process();
return true;
});
} catch (Exception e) {
if (Throwables.getCausalChain(e).stream().anyMatch(x -> x instanceof JobStoppedException)) {
// "in this short period user might change job state" happens
logger.info("[LESS_LIKELY_THINGS_HAPPENED] JobStoppedException thrown from in a UnitOfWork", e);
tryAgain = true;
} else {
throw new JobStoppedException(e);
}
}
}
}
/** Marks this executable RUNNING before the actual work begins. */
protected void onExecuteStart() throws JobStoppedException {
    wrapWithCheckQuit(() -> updateJobOutput(project, getId(), ExecutableState.RUNNING, null, null, null));
}
/**
 * Persists the terminal state of this step once execution ends: SUCCEED
 * (possibly adjusted by {@link #adjustState(ExecutableState)}), SKIP, or ERROR.
 * On error, sibling DAG pipelines are stopped and the failure is rethrown as an
 * {@link ExecuteException}.
 */
protected void onExecuteFinished(ExecuteResult result) throws ExecuteException {
logger.info("Execute finished {}, state:{}", this.getDisplayName(), result.state());
MetricsGroup.hostTagCounterInc(MetricsName.JOB_STEP_ATTEMPTED, MetricsCategory.PROJECT, project, retry);
if (result.succeed()) {
wrapWithCheckQuit(() -> {
// subclasses may map SUCCEED to another state (e.g. a paused follow-up)
ExecutableState state = adjustState(ExecutableState.SUCCEED);
logger.info("Job {} adjust future state from {} to {}", getId(), ExecutableState.SUCCEED.name(),
state.name());
updateJobOutput(project, getId(), state, result.getExtraInfo(), result.output(), null);
});
} else if (result.skip()) {
wrapWithCheckQuit(() -> {
updateJobOutput(project, getId(), ExecutableState.SKIP, result.getExtraInfo(), result.output(), null);
});
} else {
MetricsGroup.hostTagCounterInc(MetricsName.JOB_FAILED_STEP_ATTEMPTED, MetricsCategory.PROJECT, project,
retry);
wrapWithCheckQuit(() -> {
updateJobOutput(project, getId(), ExecutableState.ERROR, result.getExtraInfo(), result.getErrorMsg(),
result.getShortErrMsg(), this::onExecuteErrorHook);
// in DAG mode, stop the other pipelines so the whole job halts
killOtherPipelineApplicationOrUpdateOtherPipelineStepStatus();
});
throw new ExecuteException(result.getThrowable());
}
}
/** Invoked when the job is stopped; reuses the error hook for cleanup. */
public void onExecuteStopHook() {
onExecuteErrorHook(getId());
}
/** Allows subclasses to remap the final state; identity by default. */
protected ExecutableState adjustState(ExecutableState originalState) {
return originalState;
}
/** Error-path hook keyed by job id; no-op here. */
protected void onExecuteErrorHook(String jobId) {
// At present, only instance of DefaultExecutableOnModel take full advantage of this method.
}
// Convenience overload: no failure message, default log path.
public void updateJobOutput(String project, String jobId, ExecutableState newStatus, Map<String, String> info,
String output, Consumer<String> hook) {
updateJobOutput(project, jobId, newStatus, info, output, null, hook);
}
// Convenience overload: uses this executable's configured log path.
public void updateJobOutput(String project, String jobId, ExecutableState newStatus, Map<String, String> info,
String output, String failedMsg, Consumer<String> hook) {
updateJobOutput(project, jobId, newStatus, info, output, this.getLogPath(), failedMsg, hook);
}
/**
 * Updates the job's persisted state and extra info inside a retried transaction,
 * then runs the optional hook and writes the (potentially large) output to HDFS
 * outside the transaction.
 *
 * @param newStatus new executable state to record
 * @param info      extra key/values merged into the existing output metadata
 * @param output    step output content, stored in HDFS rather than the metastore
 * @param hook      optional callback invoked with the job id after the tx commits
 */
public void updateJobOutput(String project, String jobId, ExecutableState newStatus, Map<String, String> info,
String output, String logPath, String failedMsg, Consumer<String> hook) {
JobContextUtil.withTxAndRetry(() -> {
ExecutableManager executableManager = getExecutableManager(project);
// merge, never replace, previously recorded extra info
val existedInfo = executableManager.getOutput(jobId).getExtra();
if (info != null) {
existedInfo.putAll(info);
}
//The output will be stored in HDFS,not in RS
if (this instanceof ChainedStageExecutable) {
if (newStatus.isNotBad()) {
executableManager.makeStageSuccess(jobId);
} else if (newStatus == ExecutableState.ERROR) {
executableManager.makeStageError(jobId);
}
}
executableManager.updateJobOutput(jobId, newStatus, existedInfo, null, null, 0, failedMsg);
return true;
});
if (hook != null) {
hook.accept(jobId);
}
//write output to HDFS
updateJobOutputToHDFS(project, jobId, output, logPath);
}
/**
 * Writes the job's output content and/or log path to its HDFS tmp-output file.
 * Either argument may be null, in which case that field is left untouched.
 */
private static void updateJobOutputToHDFS(String project, String jobId, String output, String logPath) {
    ExecutableManager manager = getExecutableManager(project);
    ExecutableOutputPO jobOutput = manager.getJobOutput(jobId);
    if (output != null) {
        jobOutput.setContent(output);
    }
    if (logPath != null) {
        jobOutput.setLogPath(logPath);
    }
    String outputHDFSPath = KylinConfig.getInstanceFromEnv().getJobTmpOutputStorePath(project, jobId);
    manager.updateJobOutputToHDFS(outputHDFSPath, jobOutput);
}
/** @return the ExecutableManager for the given project, using the env config */
protected static ExecutableManager getExecutableManager(String project) {
return ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
}
/**
 * Template method driving one executable: marks the job RUNNING, runs
 * {@link #doWork(JobContext)} under the configured retry policy, then persists
 * the final state via {@link #onExecuteFinished(ExecuteResult)}.
 *
 * @param jobContext shared scheduler context, retained in {@link #context}
 * @return the result of the last attempt
 * @throws ExecuteException when the work ultimately failed
 */
@Override
public final ExecuteResult execute(JobContext jobContext) throws ExecuteException {
    logger.info("Executing AbstractExecutable {}", this.getDisplayName());
    this.context = jobContext;
    ExecuteResult result;
    onExecuteStart();
    do {
        if (retry > 0) {
            // back off before each retry attempt
            pauseOnRetry();
            logger.info("Retrying for the {}th time ", retry);
        }
        try {
            result = wrapWithExecuteException(() -> doWork(jobContext));
        } catch (JobStoppedException jse) {
            // job quits voluntarily or non-voluntarily, in this case, the job is "finished"
            // we createSucceed() to run onExecuteFinished()
            result = ExecuteResult.createSucceed();
        } catch (Exception e) {
            result = ExecuteResult.createError(e);
        }
        retry++;
        //exception in ExecuteResult should handle by user itself.
    } while (needRetry(this.retry, result.getThrowable()));
    //check exception in result to avoid retry on ChainedExecutable(only need retry on subtask actually)
    onExecuteFinished(result);
    if (result.getThrowable() != null) {
        // log via SLF4J instead of printStackTrace() so the trace reaches the job log
        logger.error("Job {} finished with exception", getDisplayName(), result.getThrowable());
    }
    return result;
}
/**
 * When this step fails in a DAG job, stops the RUNNING steps of the other
 * pipelines (killing their applications or pausing them) so the job halts.
 */
protected void killOtherPipelineApplicationOrUpdateOtherPipelineStepStatus() {
    // message previously read "piper line"; fixed wording
    logger.error("{} kill other pipeline application or update other pipeline step status", getDisplayName());
    List<AbstractExecutable> otherPipelineRunningStep = getOtherPipelineRunningStep();
    otherPipelineRunningStep.forEach(AbstractExecutable::killApplicationIfExistsOrUpdateStepStatus);
}
/**
 * Collects the RUNNING steps belonging to the other pipelines of a DAG job:
 * sibling tasks sharing this step's predecessor, plus their downstream steps.
 * Returns an empty list for non-DAG (chained) jobs.
 */
protected List<AbstractExecutable> getOtherPipelineRunningStep() {
val parent = getParent();
val previousStepId = getPreviousStep();
if (parent instanceof DefaultExecutable && parent.getJobSchedulerMode() == JobSchedulerModeEnum.DAG) {
val otherPipelineTasks = getOtherPipelineTasks((DefaultExecutable) parent, previousStepId);
// id -> task index over all tasks of the parent, used to resolve next-step ids
val dagExecutablesMap = ((DefaultExecutable) parent).getTasks().stream()
.collect(Collectors.toMap(AbstractExecutable::getId, task -> task));
return otherPipelineTasks.stream()
.map(task -> getStepOrNextStepsWithStatus(task, dagExecutablesMap, ExecutableState.RUNNING))
.collect(ArrayList::new, ArrayList::addAll, ArrayList::addAll);
}
return Lists.newArrayList();
}
/** Sibling tasks of the parent that share this step's predecessor, excluding this step. */
private List<AbstractExecutable> getOtherPipelineTasks(DefaultExecutable parent, String previousStepId) {
    return parent.getTasks().stream() //
            .filter(task -> !getId().equals(task.getId())) //
            .filter(task -> StringUtils.equals(task.getPreviousStep(), previousStepId)) //
            .collect(Collectors.toList());
}
/**
 * Depth-first search over the DAG: returns this step if it is in the given
 * state, otherwise recurses into its successors and collects every matching
 * descendant.
 */
protected List<AbstractExecutable> getStepOrNextStepsWithStatus(AbstractExecutable executable,
Map<String, AbstractExecutable> dagExecutablesMap, ExecutableState state) {
    if (state == executable.getStatus()) {
        return Lists.newArrayList(executable);
    }
    return executable.getNextSteps().stream() //
            .map(dagExecutablesMap::get) //
            .flatMap(next -> getStepOrNextStepsWithStatus(next, dagExecutablesMap, state).stream()) //
            .collect(Collectors.toList());
}
/**
 * Default handling when a step in another pipeline failed: pause this step.
 * Subclasses may override to additionally kill a running application, if any.
 */
public void killApplicationIfExistsOrUpdateStepStatus() {
ExecutableManager executableManager = getExecutableManager(project);
executableManager.updateJobOutput(getId(), ExecutableState.PAUSED, null, null, null, 0, null);
}
/**
 * Throws {@link JobStoppedException} if this step must quit because the job was
 * stopped non-voluntarily (paused/discarded/restarted by the user).
 *
 * @param applyChange whether to also persist the aborted state before quitting
 */
protected void checkNeedQuit(boolean applyChange) throws JobStoppedException {
// non voluntarily
abortIfJobStopped(applyChange);
}
/**
 * For non-chained executable, depend on its parent(instance of DefaultExecutable).
 * A step without a parent never suicides.
 */
public boolean checkSuicide() {
    final AbstractExecutable parent = getParent();
    return parent != null && parent.checkSuicide();
}
// If a job needs to check for external status changes, it keeps the default (true);
// subclasses that manage their own lifecycle may override and return false.
protected boolean needCheckState() {
return true;
}
/**
 * Aborts this step, throwing {@link JobStoppedNonVoluntarilyException}, when the
 * parent job has been moved to a non-running state (READY/PENDING/PAUSED/
 * DISCARDED) by an external action. The check runs inside a retried transaction
 * so state reads and the optional state write are consistent.
 *
 * @param applyChange when true, also records the parent's state on this step
 */
public void abortIfJobStopped(boolean applyChange) throws JobStoppedException {
if (!needCheckState()) {
return;
}
Boolean aborted = JobContextUtil.withTxAndRetry(() -> {
boolean abort = false;
val parent = getParent();
ExecutableState state = parent.getStatus();
switch (state) {
case READY:
case PENDING:
case PAUSED:
case DISCARDED:
// If a job is restarted(all steps' status changed to READY) or paused or discarded,
// the old thread may still be alive and attempt to update job output
//in this case the old thread should fail itself by calling this
if (applyChange) {
logger.debug("abort {} because parent job is {}", getId(), state);
updateJobOutput(project, getId(), state, null, null, null);
}
abort = true;
break;
default:
break;
}
return abort;
});
if (aborted) {
throw new JobStoppedNonVoluntarilyException();
}
}
// Retry will happen in below cases:
// 1) if property "kylin.job.retry-exception-classes" is not set or is null, all jobs with exceptions will retry according to the retry times.
// 2) if property "kylin.job.retry-exception-classes" is set and is not null, only jobs with the specified exceptions will retry according to the retry times.
public boolean needRetry(int retry, Throwable t) {
    // chained executables never retry as a whole; only their subtasks do
    if (t == null || this instanceof DefaultExecutable) {
        return false;
    }
    if (retry > KylinConfig.getInstanceFromEnv().getJobRetry()) {
        return false;
    }
    // an interrupt means the job was stopped on purpose; never retry it
    return !ThrowableUtils.isInterruptedException(t) && isRetryableException(t.getClass().getName());
}
// pauseOnRetry should only work after a retry has been triggered
private void pauseOnRetry() {
    int interval = KylinConfig.getInstanceFromEnv().getJobRetryInterval();
    logger.info("Pause {} milliseconds before retry", interval);
    try {
        TimeUnit.MILLISECONDS.sleep(interval);
    } catch (InterruptedException e) {
        // Pass the exception as the final argument (no "{}" placeholder) so SLF4J
        // records the stack trace; the old form discarded it.
        logger.error("Job retry was interrupted", e);
        // restore the interrupt flag so callers can observe the interruption
        Thread.currentThread().interrupt();
    }
}
/** True when no retry-exception filter is configured, or the class name is listed in it. */
private static boolean isRetryableException(String exceptionName) {
    String[] retryExceptions = KylinConfig.getInstanceFromEnv().getJobRetryExceptions();
    if (ArrayUtils.isEmpty(retryExceptions)) {
        return true;
    }
    return ArrayUtils.contains(retryExceptions, exceptionName);
}
/** Performs the actual work of this executable; implemented by subclasses. */
protected abstract ExecuteResult doWork(JobContext context) throws ExecuteException;
/** A step is schedulable only while it is still PENDING. */
@Override
public boolean isRunnable() {
    return ExecutableState.PENDING == this.getStatus();
}
/** Human-readable identifier combining the step name and its id. */
public String getDisplayName() {
    return String.format("%s (%s)", this.name, this.id);
}
/** @return the current persisted state, always read fresh from the manager */
@Override
public final ExecutableState getStatus() {
ExecutableManager manager = getManager();
return manager.getOutput(this.getId()).getState();
}
// This status is recorded when executable is inited.
// Use method 'getStatus' to get the last status.
public final ExecutableState getStatusInMem() {
return getStatus(getPo());
}
/** @return the state as recorded in the given persisted object */
public final ExecutableState getStatus(ExecutablePO po) {
ExecutableManager manager = getManager();
return manager.getOutput(this.getId(), po).getState();
}
/** @return last-modified timestamp of this step's output */
public final long getLastModified() {
return getLastModified(getOutput());
}
public static long getLastModified(Output output) {
return output.getLastModified();
}
/** @return byte size recorded on this step's output */
public final long getByteSize() {
return getByteSize(getOutput());
}
public static long getByteSize(Output output) {
return output.getByteSize();
}
/**
 * Notifies users by mail when any built layout came out empty (0 rows) and mail
 * notification is enabled.
 *
 * @param addOrUpdateCuboids layouts produced by this job
 * @return true when a notification was actually sent
 */
public boolean notifyUserIfNecessary(NDataLayout[] addOrUpdateCuboids) {
    boolean hasEmptyLayout = Arrays.stream(addOrUpdateCuboids) //
            .anyMatch(layout -> layout.getRows() == 0);
    if (hasEmptyLayout && getConfig().isMailEnabled()) {
        logger.info("Layout rows is 0, notify user");
        return notifyUser(MailNotificationType.JOB_LOAD_EMPTY_DATA);
    }
    return false;
}
/**
 * Sends the given mail notification if the project configuration enables it.
 * The mail content is built from the top-level job (this executable when it is
 * a DefaultExecutable, otherwise its parent).
 *
 * @return true when the mail was sent
 */
public boolean notifyUser(MailNotificationType notificationType) {
    Preconditions.checkState((this instanceof DefaultExecutable) || this.getParent() instanceof DefaultExecutable);
    val projectConfig = NProjectManager.getInstance(getConfig()).getProject(project).getConfig();
    if (!notificationType.needNotify(projectConfig)) {
        logger.info("[{}] is not specified by user, not need to notify users.", notificationType.getDisplayName());
        return false;
    }
    List<String> users = getAllNotifyUsers(projectConfig);
    AbstractExecutable mailSource = (this instanceof DefaultExecutable) ? this : this.getParent();
    return MailNotifier.notifyUser(projectConfig, JobMailUtil.createMail(notificationType, mailSource), users);
}
/**
 * Applies the given YARN queue to this job's Spark task, but only when the
 * project config enables per-task queues and lists the queue as available.
 */
public void setSparkYarnQueueIfEnabled(String project, String yarnQueue) {
ProjectInstance proj = NProjectManager.getInstance(KylinConfig.getInstanceFromEnv()).getProject(project);
KylinConfig config = proj.getConfig();
// TODO check if valid queue
if (config.isSetYarnQueueInTaskEnabled() && config.getYarnQueueInTaskAvailable().contains(yarnQueue)) {
this.setSparkYarnQueue(yarnQueue);
}
}
/** @return the parent executable resolved from the PARENT_ID param, or null if unset */
public final AbstractExecutable getParent() {
return getManager().getJob(getParam(PARENT_ID));
}
/** Same as {@link #getParent()} but resolved against the given persisted object. */
public final AbstractExecutable getParent(ExecutablePO po) {
return getManager().getJob(getParam(PARENT_ID), po);
}
/** Fails fast with IllegalStateException unless the parent job is currently RUNNING. */
public void checkParentJobStatus() {
    final AbstractExecutable parentJob = getParent();
    if (parentJob.getStatus() != ExecutableState.RUNNING) {
        throw new IllegalStateException("invalid parent job state, parent job:" + parentJob.getDisplayName()
                + ", state:" + parentJob.getStatus());
    }
}
/** @return the project name; fails if the executable was never bound to a project */
public final String getProject() {
if (project == null) {
throw new IllegalStateException("project is not set for abstract executable " + getId());
}
return project;
}
/** Binds this executable to a project. */
public final void setProject(String project) {
this.project = project;
}
/** Alias for {@link #getId()}. */
public final String getJobId() {
return getId();
}
/** @return this step's output, read fresh from the manager */
@Override
public final Output getOutput() {
return getManager().getOutput(getId());
}
/** @return this step's output resolved against the given persisted object */
public final Output getOutput(ExecutablePO executablePO) {
return getManager().getOutput(getId(), executablePO);
}
public final long getStartTime() {
return getStartTime(getOutput());
}
    /** Start time carried by the given output. */
    public static long getStartTime(Output output) {
        return output.getStartTime();
    }
    /** End time of this executable, read from its current output. */
    public final long getEndTime() {
        return getEndTime(getOutput());
    }
    /** End time carried by the given output. */
    public static long getEndTime(Output output) {
        return output.getEndTime();
    }
    /** End time of this executable, read from the given persisted job object. */
    public final long getEndTime(ExecutablePO po) {
        return getEndTime(getOutput(po));
    }
    /** Extra key/value info attached to this executable's current output. */
    public final Map<String, String> getExtraInfo() {
        return getOutput().getExtra();
    }
    /** Creation time of this executable, looked up through the manager. */
    public final long getCreateTime() {
        return getManager().getCreateTime(getId());
    }
    /** Creation time carried by the given output. */
    public static long getCreateTime(Output output) {
        return output.getCreateTime();
    }
// just using to get job duration in get job list
public long getDurationFromStepOrStageDurationSum(ExecutablePO executablePO) {
var duration = getDuration(executablePO);
if (this instanceof DagExecutable && getJobSchedulerMode() == JobSchedulerModeEnum.DAG) {
duration = calculateDagExecutableDuration(executablePO);
} else if (this instanceof ChainedExecutable) {
duration = calculateChainedExecutableDuration(executablePO);
}
return duration;
}
    /**
     * Duration of a DAG-scheduled job: starting from every root task (one with a
     * blank previous step), sum durations along its chain of next steps, then
     * take the maximum over all roots — i.e. the longest path through the DAG.
     */
    private long calculateDagExecutableDuration(ExecutablePO executablePO) {
        val tasks = ((DagExecutable) this).getTasks();
        val tasksMap = tasks.stream().collect(Collectors.toMap(AbstractExecutable::getId, task -> task));
        return tasks.stream().filter(task -> StringUtils.isBlank(task.getPreviousStep()))
                .map(task -> calculateDagTaskExecutableDuration(task, executablePO, tasksMap)).max(Long::compare)
                .orElse(0L);
    }
    /**
     * Longest-path duration rooted at {@code task}: the task's own duration plus
     * the maximum recursive duration among its next steps (0 for a leaf task).
     */
    private Long calculateDagTaskExecutableDuration(AbstractExecutable task, ExecutablePO executablePO,
            Map<String, ? extends AbstractExecutable> tasksMap) {
        Long nextTaskDurationMax = task.getNextSteps().stream().map(tasksMap::get)
                .map(nextTask -> calculateDagTaskExecutableDuration(nextTask, executablePO, tasksMap))
                .max(Long::compare).orElse(0L);
        return getTaskDuration(task, executablePO) + nextTaskDurationMax;
    }
private long calculateChainedExecutableDuration(ExecutablePO executablePO) {
val tasks = ((ChainedExecutable) this).getTasks();
val jobAtomicDuration = new AtomicLong(0);
tasks.forEach(task -> {
long taskDuration = getTaskDuration(task, executablePO);
jobAtomicDuration.addAndGet(taskDuration);
});
return jobAtomicDuration.get();
}
    /** Test-only accessor for the private {@link #getTaskDuration} helper. */
    @VisibleForTesting
    public long getTaskDurationToTest(AbstractExecutable task, ExecutablePO executablePO) {
        return getTaskDuration(task, executablePO);
    }
private long getTaskDuration(AbstractExecutable task, ExecutablePO executablePO) {
var taskDuration = task.getDuration(executablePO);
if (task instanceof ChainedStageExecutable) {
taskDuration = calculateSingleSegmentStagesDuration((ChainedStageExecutable) task, executablePO,
taskDuration);
}
return taskDuration;
}
private long calculateSingleSegmentStagesDuration(ChainedStageExecutable task, ExecutablePO executablePO,
long taskDuration) {
val stagesMap = task.getStagesMap();
if (stagesMap.size() == 1) {
for (Map.Entry<String, List<StageExecutable>> entry : stagesMap.entrySet()) {
taskDuration = entry.getValue().stream()
.map(stage -> getStageDuration(stage.getOutput(entry.getKey()), getParent())) //
.mapToLong(Long::valueOf) //
.sum();
}
}
return taskDuration;
}
    /** Duration of this executable based on its current output. */
    public long getDuration() {
        return getDuration(getOutput());
    }
    /** Duration of this executable based on the given persisted job object. */
    public long getDuration(ExecutablePO executablePO) {
        return getDuration(getOutput(executablePO));
    }
public static long computeDuration(Output output) {
if (output.getStartTime() == 0) {
return 0;
}
return output.getEndTime() == 0 ? System.currentTimeMillis() - output.getStartTime()
: output.getEndTime() - output.getStartTime();
}
    // just used for the stage job
    /**
     * Duration of a stage. A persisted non-zero duration is returned as-is,
     * except that while both the parent job and the stage itself are RUNNING the
     * time elapsed since the stage's last running start is added on top. When no
     * duration was recorded, falls back to {@link #computeDuration(Output)}.
     */
    public static long getStageDuration(Output output, AbstractExecutable parent) {
        if (output.getDuration() != 0) {
            var duration = output.getDuration();
            // If the parent job is not running, the duration of the stage
            // is no longer counted no matter what state the stage is
            if (parent != null && parent.getStatus() == ExecutableState.RUNNING
                    && ExecutableState.RUNNING == output.getState()) {
                duration = duration + System.currentTimeMillis() - output.getLastRunningStartTime();
            }
            return duration;
        }
        return computeDuration(output);
    }
    /**
     * Duration from an output: a persisted non-zero duration, plus live elapsed
     * time since the last running start when the output is still RUNNING; falls
     * back to {@link #computeDuration(Output)} when no duration was recorded.
     */
    public static long getDuration(Output output) {
        if (output.getDuration() != 0) {
            var duration = output.getDuration();
            if (ExecutableState.RUNNING == output.getState()) {
                duration = duration + System.currentTimeMillis() - output.getLastRunningStartTime();
            }
            return duration;
        }
        return computeDuration(output);
    }
    /** Wait time of this executable, resolved from the persisted job of its extracted job id. */
    public long getWaitTime() {
        String jobId = ExecutableManager.extractJobId(getId());
        return getWaitTime(getManager().getExecutablePO(jobId));
    }
    /**
     * Time this executable spent waiting: the gap between the end of the previous
     * step (or the job's creation time when there is no previous step) and this
     * step's start. Returns 0 while the previous step has not finished
     * successfully; never returns a negative value.
     */
    public long getWaitTime(ExecutablePO po) {
        Output output = getOutput(po);
        long startTime = output.getStartTime();
        long lastTaskEndTime = output.getCreateTime();
        var lastTaskStatus = output.getState();
        int stepId = getStepId();
        // get end_time of last task
        if (getParent(po) instanceof DefaultExecutable) {
            val parentExecutable = (DefaultExecutable) getParent(po);
            val lastExecutable = parentExecutable.getSubTaskByStepId(stepId - 1);
            // No previous step: fall back to the parent's creation time / status.
            lastTaskEndTime = lastExecutable.map(e -> e.getEndTime(po))
                    .orElse(parentExecutable.getOutput(po).getCreateTime());
            lastTaskStatus = lastExecutable.map(e -> e.getStatus(po)).orElse(parentExecutable.getStatus(po));
        }
        //if last task is not end, wait_time is 0
        if (stepId > 0 && (lastTaskEndTime == 0 || lastTaskStatus != ExecutableState.SUCCEED)) {
            return 0;
        }
        if (startTime == 0) {
            if (getParent(po) != null && getParent(po).getStatus(po) == ExecutableState.DISCARDED) {
                // job is discarded before started
                startTime = getParent(po).getEndTime(po);
            } else {
                //the job/task is not started, use the current time
                startTime = System.currentTimeMillis();
            }
        }
        long waitTime = startTime - lastTaskEndTime;
        return waitTime < 0 ? 0 : waitTime;
    }
    /** Total elapsed time: execution duration plus wait time. */
    public long getTotalDurationTime() {
        return getDuration() + getWaitTime();
    }
public final Set<String> getDependentFiles() {
val value = getExtraInfo().getOrDefault(DEPENDENT_FILES, "");
if (StringUtils.isEmpty(value)) {
return Sets.newHashSet();
}
return Sets.newHashSet(value.split(","));
}
    /**
     * Whether this job was moved to DISCARD or PAUSE without the job fetcher's
     * awareness.
     *
     * SUICIDE is not such a case, as the job fetcher is aware of it.
     *
     * Only valid on a root job: callers must not invoke this on a sub-task
     * (enforced by the parent-is-null precondition below).
     */
    protected final boolean isStoppedNonVoluntarily() {
        Preconditions.checkState(getParent() == null);
        final ExecutableState status = getOutput().getState();
        return status.isStoppedNonVoluntarily();
    }
    /** Whether the retry count is still within the configured job retry limit. */
    protected boolean needRetry() {
        return this.retry <= getConfig().getJobRetry();
    }
    /** Dependencies of this executable; none by default — subclasses may override. */
    public Set<String> getDependencies(KylinConfig config) {
        return Sets.newHashSet();
    }
    /** Driver memory configured for table-sampling jobs. */
    private static int computeTableAnalyzeMemory() {
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        return config.getSparkEngineDriverMemoryTableSampling();
    }
    /** Driver memory configured for snapshot-building jobs. */
    private static int computeSnapshotAnalyzeMemory() {
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        return config.getSparkEngineDriverMemorySnapshotBuilding();
    }
    /** Driver memory configured for internal-table loading jobs. */
    private static int computeInternalTableLoadMemory() {
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        return config.getSparkEngineDriverMemoryInternalTableLoading();
    }
    /**
     * Picks the Spark driver memory for this step by job category: table
     * sampling (OTHER), snapshot building, or internal-table loading each use
     * their dedicated config value; build jobs scale with the number of layouts;
     * everything else falls back to {@code DEFAULT_DRIVER_MEMORY}.
     */
    public int computeStepDriverMemory() {
        switch (getJobType().getCategory()) {
        case OTHER:
            return computeTableAnalyzeMemory();
        case SNAPSHOT:
            return computeSnapshotAnalyzeMemory();
        case INTERNAL:
            return computeInternalTableLoadMemory();
        default:
            String layouts = getParam(NBatchConstants.P_LAYOUT_IDS);
            if (layouts != null) {
                return computeDriverMemory(StringHelper.splitAndTrim(layouts, ",").length);
            }
        }
        return DEFAULT_DRIVER_MEMORY;
    }
public static Integer computeDriverMemory(Integer cuboidNum) {
KylinConfig config = KylinConfig.getInstanceFromEnv();
int[] driverMemoryStrategy = config.getSparkEngineDriverMemoryStrategy();
List<Integer> strategy = Lists.newArrayList(cuboidNum);
Arrays.stream(driverMemoryStrategy).forEach(strategy::add);
Collections.sort(strategy);
int index = strategy.indexOf(cuboidNum);
int driverMemoryMaximum = config.getSparkEngineDriverMemoryMaximum();
int driverMemoryBase = config.getSparkEngineDriverMemoryBase();
driverMemoryBase += driverMemoryBase * index;
return Math.min(driverMemoryBase, driverMemoryMaximum);
}
    /** Compact id/name/state representation for logs. */
    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this).add("id", getId()).add("name", getName()).add("state", getStatus())
                .toString();
    }
    /**
     * Runs {@code lambda}, rethrowing {@link ExecuteException} unchanged and
     * wrapping any other exception in one. Before the exception propagates, the
     * job's error details are persisted — except for
     * {@link JobStoppedNonVoluntarilyException}, which is deliberately not
     * recorded as a job error.
     *
     * @param lambda the work to run
     * @return the lambda's result on success
     * @throws ExecuteException the original or wrapping execution failure
     */
    public <T> T wrapWithExecuteException(final Callable<T> lambda) throws ExecuteException {
        Exception exception = null;
        try {
            return lambda.call();
        } catch (ExecuteException e) {
            exception = e;
            throw e;
        } catch (Exception e) {
            exception = e;
            throw new ExecuteException(e);
        } finally {
            // Record the failure (if any) without altering the in-flight throw.
            if (exception != null && !(exception instanceof JobStoppedNonVoluntarilyException)) {
                wrapWithExecuteExceptionUpdateJobError(exception);
            }
        }
    }
    /**
     * Persists the given exception's stack trace and message as this job's error
     * info, inside a retried transaction.
     */
    protected void wrapWithExecuteExceptionUpdateJobError(Exception exception) {
        JobContextUtil.withTxAndRetry(() -> {
            getExecutableManager(project).updateJobError(getId(), getId(), null,
                    ExceptionUtils.getStackTrace(exception), exception.getMessage());
            return true;
        });
    }
    /** Whether this is an internal-table Spark job; false by default, subclasses may override. */
    public boolean isInternalTableSparkJob() {
        return false;
    }
}
|
googleapis/google-cloud-java | 35,784 | java-filestore/proto-google-cloud-filestore-v1beta1/src/main/java/com/google/cloud/filestore/v1beta1/UpdateShareRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/filestore/v1beta1/cloud_filestore_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.filestore.v1beta1;
/**
*
*
* <pre>
* UpdateShareRequest updates the settings of a share.
* </pre>
*
* Protobuf type {@code google.cloud.filestore.v1beta1.UpdateShareRequest}
*/
public final class UpdateShareRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.filestore.v1beta1.UpdateShareRequest)
UpdateShareRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateShareRequest.newBuilder() to construct.
private UpdateShareRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateShareRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateShareRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_UpdateShareRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_UpdateShareRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.filestore.v1beta1.UpdateShareRequest.class,
com.google.cloud.filestore.v1beta1.UpdateShareRequest.Builder.class);
}
private int bitField0_;
public static final int SHARE_FIELD_NUMBER = 1;
private com.google.cloud.filestore.v1beta1.Share share_;
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the share field is set.
*/
@java.lang.Override
public boolean hasShare() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The share.
*/
@java.lang.Override
public com.google.cloud.filestore.v1beta1.Share getShare() {
return share_ == null ? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance() : share_;
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.filestore.v1beta1.ShareOrBuilder getShareOrBuilder() {
return share_ == null ? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance() : share_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in
* this field. The elements of the repeated paths field may only include these
* fields:
*
* * "description"
* * "capacity_gb"
* * "labels"
* * "nfs_export_options"
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in
* this field. The elements of the repeated paths field may only include these
* fields:
*
* * "description"
* * "capacity_gb"
* * "labels"
* * "nfs_export_options"
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in
* this field. The elements of the repeated paths field may only include these
* fields:
*
* * "description"
* * "capacity_gb"
* * "labels"
* * "nfs_export_options"
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getShare());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getShare());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.filestore.v1beta1.UpdateShareRequest)) {
return super.equals(obj);
}
com.google.cloud.filestore.v1beta1.UpdateShareRequest other =
(com.google.cloud.filestore.v1beta1.UpdateShareRequest) obj;
if (hasShare() != other.hasShare()) return false;
if (hasShare()) {
if (!getShare().equals(other.getShare())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasShare()) {
hash = (37 * hash) + SHARE_FIELD_NUMBER;
hash = (53 * hash) + getShare().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.UpdateShareRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.filestore.v1beta1.UpdateShareRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* UpdateShareRequest updates the settings of a share.
* </pre>
*
* Protobuf type {@code google.cloud.filestore.v1beta1.UpdateShareRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.filestore.v1beta1.UpdateShareRequest)
com.google.cloud.filestore.v1beta1.UpdateShareRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_UpdateShareRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_UpdateShareRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.filestore.v1beta1.UpdateShareRequest.class,
com.google.cloud.filestore.v1beta1.UpdateShareRequest.Builder.class);
}
// Construct using com.google.cloud.filestore.v1beta1.UpdateShareRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getShareFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
share_ = null;
if (shareBuilder_ != null) {
shareBuilder_.dispose();
shareBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_UpdateShareRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.UpdateShareRequest getDefaultInstanceForType() {
return com.google.cloud.filestore.v1beta1.UpdateShareRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.UpdateShareRequest build() {
com.google.cloud.filestore.v1beta1.UpdateShareRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.UpdateShareRequest buildPartial() {
com.google.cloud.filestore.v1beta1.UpdateShareRequest result =
new com.google.cloud.filestore.v1beta1.UpdateShareRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.filestore.v1beta1.UpdateShareRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.share_ = shareBuilder_ == null ? share_ : shareBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.filestore.v1beta1.UpdateShareRequest) {
return mergeFrom((com.google.cloud.filestore.v1beta1.UpdateShareRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.filestore.v1beta1.UpdateShareRequest other) {
if (other == com.google.cloud.filestore.v1beta1.UpdateShareRequest.getDefaultInstance())
return this;
if (other.hasShare()) {
mergeShare(other.getShare());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getShareFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.filestore.v1beta1.Share share_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.filestore.v1beta1.Share,
com.google.cloud.filestore.v1beta1.Share.Builder,
com.google.cloud.filestore.v1beta1.ShareOrBuilder>
shareBuilder_;
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the share field is set.
*/
public boolean hasShare() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The share.
*/
public com.google.cloud.filestore.v1beta1.Share getShare() {
if (shareBuilder_ == null) {
return share_ == null
? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance()
: share_;
} else {
return shareBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setShare(com.google.cloud.filestore.v1beta1.Share value) {
if (shareBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
share_ = value;
} else {
shareBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setShare(com.google.cloud.filestore.v1beta1.Share.Builder builderForValue) {
if (shareBuilder_ == null) {
share_ = builderForValue.build();
} else {
shareBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeShare(com.google.cloud.filestore.v1beta1.Share value) {
if (shareBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& share_ != null
&& share_ != com.google.cloud.filestore.v1beta1.Share.getDefaultInstance()) {
getShareBuilder().mergeFrom(value);
} else {
share_ = value;
}
} else {
shareBuilder_.mergeFrom(value);
}
if (share_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearShare() {
bitField0_ = (bitField0_ & ~0x00000001);
share_ = null;
if (shareBuilder_ != null) {
shareBuilder_.dispose();
shareBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.filestore.v1beta1.Share.Builder getShareBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getShareFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.filestore.v1beta1.ShareOrBuilder getShareOrBuilder() {
if (shareBuilder_ != null) {
return shareBuilder_.getMessageOrBuilder();
} else {
return share_ == null
? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance()
: share_;
}
}
/**
*
*
* <pre>
* Required. A share resource.
* Only fields specified in update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.filestore.v1beta1.Share,
com.google.cloud.filestore.v1beta1.Share.Builder,
com.google.cloud.filestore.v1beta1.ShareOrBuilder>
getShareFieldBuilder() {
if (shareBuilder_ == null) {
shareBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.filestore.v1beta1.Share,
com.google.cloud.filestore.v1beta1.Share.Builder,
com.google.cloud.filestore.v1beta1.ShareOrBuilder>(
getShare(), getParentForChildren(), isClean());
share_ = null;
}
return shareBuilder_;
}
// `update_mask` field (field 2). Presence tracked by bit 0x00000002 of
// bitField0_. Same dual plain-field/nested-builder representation as `share`.
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The updateMask.
 */
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  } else {
    return updateMaskBuilder_.getMessage();
  }
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // Only proto-merge when a non-default value is already present; otherwise
    // a plain assignment is equivalent and avoids allocating a builder.
    if (((bitField0_ & 0x00000002) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  if (updateMask_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000002);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    // Release the nested builder so a later get re-creates it from scratch.
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}

/**
 *
 *
 * <pre>
 * Required. Mask of fields to update. At least one path must be supplied in
 * this field. The elements of the repeated paths field may only include these
 * fields:
 *
 * * "description"
 * * "capacity_gb"
 * * "labels"
 * * "nfs_export_options"
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
// Lazily creates the SingleFieldBuilderV3 for `update_mask`; after creation the
// builder owns the value and updateMask_ is cleared.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.filestore.v1beta1.UpdateShareRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.filestore.v1beta1.UpdateShareRequest)
// Singleton default instance, eagerly created at class-initialization time.
private static final com.google.cloud.filestore.v1beta1.UpdateShareRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.filestore.v1beta1.UpdateShareRequest();
}

public static com.google.cloud.filestore.v1beta1.UpdateShareRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. Parsing errors are rethrown as
// InvalidProtocolBufferException with the partially-built message attached so
// callers can inspect what was decoded before the failure.
private static final com.google.protobuf.Parser<UpdateShareRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateShareRequest>() {
      @java.lang.Override
      public UpdateShareRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdateShareRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateShareRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.filestore.v1beta1.UpdateShareRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,850 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListStudiesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/vizier_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [VizierService.ListStudies][google.cloud.aiplatform.v1.VizierService.ListStudies].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListStudiesResponse}
*/
public final class ListStudiesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListStudiesResponse)
ListStudiesResponseOrBuilder {
private static final long serialVersionUID = 0L;

// Use ListStudiesResponse.newBuilder() to construct.
private ListStudiesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor initializes fields to their proto3 defaults
// (empty repeated list, empty string).
private ListStudiesResponse() {
  studies_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListStudiesResponse();
}
// Reflection plumbing: descriptor and field-accessor table generated from
// google/cloud/aiplatform/v1/vizier_service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1.VizierServiceProto
      .internal_static_google_cloud_aiplatform_v1_ListStudiesResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1.VizierServiceProto
      .internal_static_google_cloud_aiplatform_v1_ListStudiesResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1.ListStudiesResponse.class,
          com.google.cloud.aiplatform.v1.ListStudiesResponse.Builder.class);
}
// Message-side (immutable) accessors for repeated field `studies` (field 1).
public static final int STUDIES_FIELD_NUMBER = 1;

@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.Study> studies_;

/**
 *
 *
 * <pre>
 * The studies associated with the project.
 * </pre>
 *
 * <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.Study> getStudiesList() {
  return studies_;
}

/**
 *
 *
 * <pre>
 * The studies associated with the project.
 * </pre>
 *
 * <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.StudyOrBuilder>
    getStudiesOrBuilderList() {
  return studies_;
}

/**
 *
 *
 * <pre>
 * The studies associated with the project.
 * </pre>
 *
 * <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
 */
@java.lang.Override
public int getStudiesCount() {
  return studies_.size();
}

/**
 *
 *
 * <pre>
 * The studies associated with the project.
 * </pre>
 *
 * <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.Study getStudies(int index) {
  return studies_.get(index);
}

/**
 *
 *
 * <pre>
 * The studies associated with the project.
 * </pre>
 *
 * <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.StudyOrBuilder getStudiesOrBuilder(int index) {
  return studies_.get(index);
}
// Message-side accessors for `next_page_token` (field 2). The field caches
// lazily: it may hold either a ByteString (as parsed) or a String (after the
// first getNextPageToken() call decodes it).
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * Passes this token as the `page_token` field of the request for a
 * subsequent call.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Passes this token as the `page_token` field of the request for a
 * subsequent call.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Cache the UTF-8 encoded form for subsequent byte accesses.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = ok.
// Proto3 messages with no required fields are always initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes fields in field-number order: studies (1), next_page_token (2),
// then any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < studies_.size(); i++) {
    output.writeMessage(1, studies_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized byte size (memoizedSize == -1 means
// not yet computed).
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < studies_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, studies_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all fields plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1.ListStudiesResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1.ListStudiesResponse other =
      (com.google.cloud.aiplatform.v1.ListStudiesResponse) obj;

  if (!getStudiesList().equals(other.getStudiesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash code is memoized (0 means not yet computed) and mixes the descriptor,
// each set field tagged by its field number, and the unknown fields.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getStudiesCount() > 0) {
    hash = (37 * hash) + STUDIES_FIELD_NUMBER;
    hash = (53 * hash) + getStudiesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard parseFrom overloads delegating to PARSER / GeneratedMessageV3
// helpers for each supported input source (ByteBuffer, ByteString, byte[],
// InputStream, CodedInputStream), with and without an extension registry.
public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.ListStudiesResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() on the default instance returns a
// fresh Builder without a redundant mergeFrom.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListStudiesResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Response message for
* [VizierService.ListStudies][google.cloud.aiplatform.v1.VizierService.ListStudies].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListStudiesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListStudiesResponse)
com.google.cloud.aiplatform.v1.ListStudiesResponseOrBuilder {
// Builder reflection plumbing: same descriptor/accessor table as the message.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1.VizierServiceProto
      .internal_static_google_cloud_aiplatform_v1_ListStudiesResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1.VizierServiceProto
      .internal_static_google_cloud_aiplatform_v1_ListStudiesResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1.ListStudiesResponse.class,
          com.google.cloud.aiplatform.v1.ListStudiesResponse.Builder.class);
}

// Construct using com.google.cloud.aiplatform.v1.ListStudiesResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
// Resets all fields to proto3 defaults and clears the presence bits
// (bit 0x00000001 = studies list mutability, bit 0x00000002 = nextPageToken).
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (studiesBuilder_ == null) {
    studies_ = java.util.Collections.emptyList();
  } else {
    studies_ = null;
    studiesBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.aiplatform.v1.VizierServiceProto
      .internal_static_google_cloud_aiplatform_v1_ListStudiesResponse_descriptor;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1.ListStudiesResponse getDefaultInstanceForType() {
  return com.google.cloud.aiplatform.v1.ListStudiesResponse.getDefaultInstance();
}
// build() enforces isInitialized(); buildPartial() does not.
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListStudiesResponse build() {
  com.google.cloud.aiplatform.v1.ListStudiesResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1.ListStudiesResponse buildPartial() {
  com.google.cloud.aiplatform.v1.ListStudiesResponse result =
      new com.google.cloud.aiplatform.v1.ListStudiesResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Transfers the repeated `studies` field: freezes the builder-held list into
// an unmodifiable view (clearing the mutability bit) or builds from the
// nested repeated-field builder.
private void buildPartialRepeatedFields(
    com.google.cloud.aiplatform.v1.ListStudiesResponse result) {
  if (studiesBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      studies_ = java.util.Collections.unmodifiableList(studies_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.studies_ = studies_;
  } else {
    result.studies_ = studiesBuilder_.build();
  }
}

// Copies singular fields that have their presence bit set.
private void buildPartial0(com.google.cloud.aiplatform.v1.ListStudiesResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
// Generic reflection-based mutators delegate to the superclass unchanged.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

// Dispatches to the typed mergeFrom when possible, otherwise falls back to
// the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.aiplatform.v1.ListStudiesResponse) {
    return mergeFrom((com.google.cloud.aiplatform.v1.ListStudiesResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Typed merge. For `studies`: if our list is still empty we adopt the other
// message's (immutable) list by reference and clear the mutability bit;
// otherwise we copy-append. When a repeated-field builder is in use the
// messages are appended to it instead.
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListStudiesResponse other) {
  if (other == com.google.cloud.aiplatform.v1.ListStudiesResponse.getDefaultInstance())
    return this;
  if (studiesBuilder_ == null) {
    if (!other.studies_.isEmpty()) {
      if (studies_.isEmpty()) {
        studies_ = other.studies_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureStudiesIsMutable();
        studies_.addAll(other.studies_);
      }
      onChanged();
    }
  } else {
    if (!other.studies_.isEmpty()) {
      if (studiesBuilder_.isEmpty()) {
        // Builder is empty: adopt the other list directly and re-create the
        // builder lazily (only when alwaysUseFieldBuilders is set).
        studiesBuilder_.dispose();
        studiesBuilder_ = null;
        studies_ = other.studies_;
        bitField0_ = (bitField0_ & ~0x00000001);
        studiesBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getStudiesFieldBuilder()
                : null;
      } else {
        studiesBuilder_.addAllMessages(other.studies_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Wire-format merge: reads tags until EOF (tag 0) or an end-group tag.
// Tag 10 = field 1 (studies, length-delimited message); tag 18 = field 2
// (next_page_token, length-delimited string). Unknown tags are preserved.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            com.google.cloud.aiplatform.v1.Study m =
                input.readMessage(
                    com.google.cloud.aiplatform.v1.Study.parser(), extensionRegistry);
            if (studiesBuilder_ == null) {
              ensureStudiesIsMutable();
              studies_.add(m);
            } else {
              studiesBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially-merged state is observed.
    onChanged();
  } // finally
  return this;
}
// Builder state. bitField0_ bit 0x00000001 marks studies_ as a private
// mutable copy; bit 0x00000002 marks nextPageToken_ as explicitly set.
private int bitField0_;

private java.util.List<com.google.cloud.aiplatform.v1.Study> studies_ =
    java.util.Collections.emptyList();

// Copy-on-write: replaces a shared/immutable list with a private ArrayList
// before the first mutation.
private void ensureStudiesIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    studies_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Study>(studies_);
    bitField0_ |= 0x00000001;
  }
}

private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.aiplatform.v1.Study,
        com.google.cloud.aiplatform.v1.Study.Builder,
        com.google.cloud.aiplatform.v1.StudyOrBuilder>
    studiesBuilder_;
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Study> getStudiesList() {
if (studiesBuilder_ == null) {
return java.util.Collections.unmodifiableList(studies_);
} else {
return studiesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public int getStudiesCount() {
if (studiesBuilder_ == null) {
return studies_.size();
} else {
return studiesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Study getStudies(int index) {
if (studiesBuilder_ == null) {
return studies_.get(index);
} else {
return studiesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder setStudies(int index, com.google.cloud.aiplatform.v1.Study value) {
if (studiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStudiesIsMutable();
studies_.set(index, value);
onChanged();
} else {
studiesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder setStudies(
int index, com.google.cloud.aiplatform.v1.Study.Builder builderForValue) {
if (studiesBuilder_ == null) {
ensureStudiesIsMutable();
studies_.set(index, builderForValue.build());
onChanged();
} else {
studiesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder addStudies(com.google.cloud.aiplatform.v1.Study value) {
if (studiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStudiesIsMutable();
studies_.add(value);
onChanged();
} else {
studiesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder addStudies(int index, com.google.cloud.aiplatform.v1.Study value) {
if (studiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStudiesIsMutable();
studies_.add(index, value);
onChanged();
} else {
studiesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder addStudies(com.google.cloud.aiplatform.v1.Study.Builder builderForValue) {
if (studiesBuilder_ == null) {
ensureStudiesIsMutable();
studies_.add(builderForValue.build());
onChanged();
} else {
studiesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder addStudies(
int index, com.google.cloud.aiplatform.v1.Study.Builder builderForValue) {
if (studiesBuilder_ == null) {
ensureStudiesIsMutable();
studies_.add(index, builderForValue.build());
onChanged();
} else {
studiesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder addAllStudies(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Study> values) {
if (studiesBuilder_ == null) {
ensureStudiesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, studies_);
onChanged();
} else {
studiesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder clearStudies() {
if (studiesBuilder_ == null) {
studies_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
studiesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public Builder removeStudies(int index) {
if (studiesBuilder_ == null) {
ensureStudiesIsMutable();
studies_.remove(index);
onChanged();
} else {
studiesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Study.Builder getStudiesBuilder(int index) {
return getStudiesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public com.google.cloud.aiplatform.v1.StudyOrBuilder getStudiesOrBuilder(int index) {
if (studiesBuilder_ == null) {
return studies_.get(index);
} else {
return studiesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1.StudyOrBuilder>
getStudiesOrBuilderList() {
if (studiesBuilder_ != null) {
return studiesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(studies_);
}
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Study.Builder addStudiesBuilder() {
return getStudiesFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.Study.getDefaultInstance());
}
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
    // Inserts a new default-initialized Study at `index` and returns its sub-builder.
    public com.google.cloud.aiplatform.v1.Study.Builder addStudiesBuilder(int index) {
      return getStudiesFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.Study.getDefaultInstance());
    }
/**
*
*
* <pre>
* The studies associated with the project.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Study studies = 1;</code>
*/
    // Sub-builders for every element of the repeated field (forces builder mode).
    public java.util.List<com.google.cloud.aiplatform.v1.Study.Builder> getStudiesBuilderList() {
      return getStudiesFieldBuilder().getBuilderList();
    }
    // Lazily creates the repeated-field builder. Once created it owns the element
    // list, so studies_ is nulled out to avoid a stale duplicate reference.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Study,
            com.google.cloud.aiplatform.v1.Study.Builder,
            com.google.cloud.aiplatform.v1.StudyOrBuilder>
        getStudiesFieldBuilder() {
      if (studiesBuilder_ == null) {
        studiesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Study,
                com.google.cloud.aiplatform.v1.Study.Builder,
                com.google.cloud.aiplatform.v1.StudyOrBuilder>(
                studies_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        studies_ = null;
      }
      return studiesBuilder_;
    }
    // Holds either a String or a ByteString; decoded lazily by getNextPageToken().
    private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Passes this token as the `page_token` field of the request for a
* subsequent call.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
    // Lazily decodes a ByteString to a UTF-8 String and caches the decoded value
    // back into the field so subsequent calls are cheap.
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Passes this token as the `page_token` field of the request for a
* subsequent call.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
    // Mirror of getNextPageToken(): lazily encodes a String to a ByteString and
    // caches the encoded form back into the field.
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Passes this token as the `page_token` field of the request for a
* subsequent call.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
    // Sets the token (null rejected) and records its presence bit (0x2).
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Passes this token as the `page_token` field of the request for a
* subsequent call.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
    // Resets the token to the proto default ("") and clears its presence bit.
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Passes this token as the `page_token` field of the request for a
* subsequent call.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
    // Byte-level setter; validates the bytes are well-formed UTF-8 before storing.
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Pass-through to the base builder's unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Pass-through to the base builder's unknown-field merging.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListStudiesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListStudiesResponse)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.cloud.aiplatform.v1.ListStudiesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListStudiesResponse();
  }
  // Canonical empty instance; safe to share because messages are immutable.
  public static com.google.cloud.aiplatform.v1.ListStudiesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that tolerates truncated/invalid input: every failure path attaches the
  // partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListStudiesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListStudiesResponse>() {
        @java.lang.Override
        public ListStudiesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<ListStudiesResponse> parser() {
    return PARSER;
  }
  // Instance-level accessor required by the Message interface.
  @java.lang.Override
  public com.google.protobuf.Parser<ListStudiesResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance.
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ListStudiesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== NOTE(review): dataset-concatenation artifact replaced. A second generated
// file begins here: googleapis/google-cloud-java,
// java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/
// src/main/java/com/google/shopping/merchant/accounts/v1beta/UpdateEmailPreferencesRequest.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/emailpreferences.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* Request message for UpdateEmailPreferences method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest}
*/
public final class UpdateEmailPreferencesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest)
UpdateEmailPreferencesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateEmailPreferencesRequest.newBuilder() to construct; this constructor
  // is only invoked by the Builder.
  private UpdateEmailPreferencesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; all fields stay absent.
  private UpdateEmailPreferencesRequest() {}
  // Invoked reflectively by GeneratedMessageV3 to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateEmailPreferencesRequest();
  }
  // Message descriptor, resolved from the generated file-descriptor holder class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1beta.EmailPreferencesProto
        .internal_static_google_shopping_merchant_accounts_v1beta_UpdateEmailPreferencesRequest_descriptor;
  }
  // Binds reflective field access to this class and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.accounts.v1beta.EmailPreferencesProto
        .internal_static_google_shopping_merchant_accounts_v1beta_UpdateEmailPreferencesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest.class,
            com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest.Builder
                .class);
  }
  // Presence bits: 0x1 = email_preferences set, 0x2 = update_mask set.
  private int bitField0_;
  public static final int EMAIL_PREFERENCES_FIELD_NUMBER = 1;
  private com.google.shopping.merchant.accounts.v1beta.EmailPreferences emailPreferences_;
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the emailPreferences field is set.
*/
  // Presence is tracked by bitField0_, not by a null check on the field.
  @java.lang.Override
  public boolean hasEmailPreferences() {
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The emailPreferences.
*/
  // Never returns null: falls back to the default instance when the field is unset.
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.EmailPreferences getEmailPreferences() {
    return emailPreferences_ == null
        ? com.google.shopping.merchant.accounts.v1beta.EmailPreferences.getDefaultInstance()
        : emailPreferences_;
  }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  // On an immutable message this is equivalent to getEmailPreferences().
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.EmailPreferencesOrBuilder
      getEmailPreferencesOrBuilder() {
    return emailPreferences_ == null
        ? com.google.shopping.merchant.accounts.v1beta.EmailPreferences.getDefaultInstance()
        : emailPreferences_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_; // presence tracked by bit 0x2
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
  // Presence is tracked by bitField0_, not by a null check on the field.
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
  // Never returns null: falls back to the default FieldMask when unset.
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  // On an immutable message this is equivalent to getUpdateMask().
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized required-fields check: -1 = unknown, 0 = false, 1 = true. Proto3 has
  // no required fields, so the answer is always true once computed.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bits are set, then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getEmailPreferences());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the wire size; -1 in memoizedSize means "not yet computed".
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEmailPreferences());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality: presence must match, then values, then unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest other =
        (com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest) obj;
    if (hasEmailPreferences() != other.hasEmailPreferences()) return false;
    if (hasEmailPreferences()) {
      if (!getEmailPreferences().equals(other.getEmailPreferences())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash over descriptor, set fields (tagged by field number), and
  // unknown fields; consistent with equals() above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasEmailPreferences()) {
      hash = (37 * hash) + EMAIL_PREFERENCES_FIELD_NUMBER;
      hash = (53 * hash) + getEmailPreferences().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Parses from a ByteBuffer with the empty extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteBuffer using the supplied extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses from a ByteString with the empty extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteString using the supplied extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses from a byte array with the empty extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a byte array using the supplied extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses the remainder of an InputStream (wraps IOExceptions per parseWithIOException).
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses the remainder of an InputStream using the supplied extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses a single length-delimited message from the stream.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  // Parses a single length-delimited message using the supplied extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses from an already-open CodedInputStream.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses from a CodedInputStream using the supplied extension registry.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Creates an empty Builder (Message interface entry point).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates an empty Builder seeded from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a Builder pre-populated with `prototype`'s field values.
  public static Builder newBuilder(
      com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // Empty Builder when called on the default instance; otherwise a merged copy.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Creates a Builder attached to a parent for nested-builder change propagation.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for UpdateEmailPreferences method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest)
com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequestOrBuilder {
    // Same descriptor as the enclosing message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1beta.EmailPreferencesProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UpdateEmailPreferencesRequest_descriptor;
    }
    // Binds reflective field access for the Builder (same table as the message).
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1beta.EmailPreferencesProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UpdateEmailPreferencesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest.class,
              com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest.Builder
                  .class);
    }
    // Construct using
    // com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    // Parent-attached constructor used by newBuilderForType(parent).
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested field builders when the runtime requires it
    // (alwaysUseFieldBuilders is set in some protobuf runtime configurations).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getEmailPreferencesFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    // Resets every field, its presence bit, and disposes any nested builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      emailPreferences_ = null;
      if (emailPreferencesBuilder_ != null) {
        emailPreferencesBuilder_.dispose();
        emailPreferencesBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    // Descriptor accessor required by the MessageOrBuilder interface.
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1beta.EmailPreferencesProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UpdateEmailPreferencesRequest_descriptor;
    }
    // Shared default instance of the message type this Builder builds.
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
          .getDefaultInstance();
    }
    // Builds and verifies initialization; throws if required fields are missing.
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest build() {
      com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; field copying happens in buildPartial0.
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
        buildPartial() {
      com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest result =
          new com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each set field (preferring the nested builder's value when present)
    // into the result and transfers the corresponding presence bits.
    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.emailPreferences_ =
            emailPreferencesBuilder_ == null ? emailPreferences_ : emailPreferencesBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Pass-through; GeneratedMessageV3.Builder implements a deep clone.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    // Reflective setter pass-through (descriptor-driven access).
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    // Reflective clear pass-through.
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    // Reflective oneof clear pass-through.
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    // Reflective repeated-field setter pass-through.
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    // Reflective repeated-field appender pass-through.
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed merge when possible; otherwise falls back to the
    // descriptor-based merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest) {
        return mergeFrom(
            (com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: only fields present in `other` are merged in; default instance
    // is a no-op short-circuit.
    public Builder mergeFrom(
        com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest other) {
      if (other
          == com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
              .getDefaultInstance()) return this;
      if (other.hasEmailPreferences()) {
        mergeEmailPreferences(other.getEmailPreferences());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always initialized: the message declares no proto2-style required fields.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse loop: tag 10 = field 1 (email_preferences), tag 18 = field 2
    // (update_mask); anything else is preserved as an unknown field. onChanged()
    // runs even on failure so partially-merged state is propagated.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getEmailPreferencesFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-local presence bits mirroring the message's bitField0_ layout.
    private int bitField0_;
    // Message-backed value; superseded by emailPreferencesBuilder_ once created.
    private com.google.shopping.merchant.accounts.v1beta.EmailPreferences emailPreferences_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1beta.EmailPreferences,
            com.google.shopping.merchant.accounts.v1beta.EmailPreferences.Builder,
            com.google.shopping.merchant.accounts.v1beta.EmailPreferencesOrBuilder>
        emailPreferencesBuilder_;
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the emailPreferences field is set.
*/
    // Presence is tracked by bitField0_, not by a null check.
    public boolean hasEmailPreferences() {
      return ((bitField0_ & 0x00000001) != 0);
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The emailPreferences.
*/
    // Returns the current value from whichever store is active; never null.
    public com.google.shopping.merchant.accounts.v1beta.EmailPreferences getEmailPreferences() {
      if (emailPreferencesBuilder_ == null) {
        return emailPreferences_ == null
            ? com.google.shopping.merchant.accounts.v1beta.EmailPreferences.getDefaultInstance()
            : emailPreferences_;
      } else {
        return emailPreferencesBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Replaces the field value (null rejected) and records presence bit 0x1.
    public Builder setEmailPreferences(
        com.google.shopping.merchant.accounts.v1beta.EmailPreferences value) {
      if (emailPreferencesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        emailPreferences_ = value;
      } else {
        emailPreferencesBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Builder-variant setter: snapshots the sub-builder via build() before storing.
    public Builder setEmailPreferences(
        com.google.shopping.merchant.accounts.v1beta.EmailPreferences.Builder builderForValue) {
      if (emailPreferencesBuilder_ == null) {
        emailPreferences_ = builderForValue.build();
      } else {
        emailPreferencesBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Merges `value` into the existing field if one is already set and non-default;
    // otherwise replaces it outright. Presence bit is only set when the field ends
    // up non-null.
    public Builder mergeEmailPreferences(
        com.google.shopping.merchant.accounts.v1beta.EmailPreferences value) {
      if (emailPreferencesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && emailPreferences_ != null
            && emailPreferences_
                != com.google.shopping.merchant.accounts.v1beta.EmailPreferences
                    .getDefaultInstance()) {
          getEmailPreferencesBuilder().mergeFrom(value);
        } else {
          emailPreferences_ = value;
        }
      } else {
        emailPreferencesBuilder_.mergeFrom(value);
      }
      if (emailPreferences_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Clears the value, its presence bit, and disposes the nested builder if any.
    public Builder clearEmailPreferences() {
      bitField0_ = (bitField0_ & ~0x00000001);
      emailPreferences_ = null;
      if (emailPreferencesBuilder_ != null) {
        emailPreferencesBuilder_.dispose();
        emailPreferencesBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Returns a mutable sub-builder; marks the field present since the caller
    // is expected to populate it.
    public com.google.shopping.merchant.accounts.v1beta.EmailPreferences.Builder
        getEmailPreferencesBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getEmailPreferencesFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Read-only view from whichever store is active; never null.
    public com.google.shopping.merchant.accounts.v1beta.EmailPreferencesOrBuilder
        getEmailPreferencesOrBuilder() {
      if (emailPreferencesBuilder_ != null) {
        return emailPreferencesBuilder_.getMessageOrBuilder();
      } else {
        return emailPreferences_ == null
            ? com.google.shopping.merchant.accounts.v1beta.EmailPreferences.getDefaultInstance()
            : emailPreferences_;
      }
    }
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the single-field builder; it takes ownership of the current
    // value, so emailPreferences_ is nulled to avoid a stale reference.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1beta.EmailPreferences,
            com.google.shopping.merchant.accounts.v1beta.EmailPreferences.Builder,
            com.google.shopping.merchant.accounts.v1beta.EmailPreferencesOrBuilder>
        getEmailPreferencesFieldBuilder() {
      if (emailPreferencesBuilder_ == null) {
        emailPreferencesBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.shopping.merchant.accounts.v1beta.EmailPreferences,
                com.google.shopping.merchant.accounts.v1beta.EmailPreferences.Builder,
                com.google.shopping.merchant.accounts.v1beta.EmailPreferencesOrBuilder>(
                getEmailPreferences(), getParentForChildren(), isClean());
        emailPreferences_ = null;
      }
      return emailPreferencesBuilder_;
    }
    // Message-backed value; superseded by updateMaskBuilder_ once created.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
    // Presence is tracked by bitField0_, not by a null check.
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
    // Returns the current value from whichever store is active; never null.
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Replaces the mask (null rejected) and records presence bit 0x2.
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Builder-variant setter: snapshots the sub-builder via build() before storing.
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Proto-merge only when a non-default message was explicitly set;
        // note the deliberate reference comparison (!=) against the shared
        // default instance, which the generated runtime guarantees is unique.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          // Otherwise simply adopt the incoming message as-is.
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      // Only flag presence/notify if a message is actually stored after merging.
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      // Drop the presence bit, the stored message, and any nested builder so
      // the field reverts to its unset state.
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        // dispose() detaches the nested builder from this parent builder.
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out a mutable builder implies the field will be modified, so
      // presence is flagged eagerly before returning it.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        // Read-only view backed by the live nested builder.
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        // No builder: fall back to the stored message or the default instance.
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        // First access: seed the builder with the current field value, then
        // null out updateMask_ — from here on the builder owns the field.
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegate to the generated superclass; kept final so subclasses cannot
      // alter unknown-field round-tripping semantics.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegate to the generated superclass's merge of unrecognized fields.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest)
}
  // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest)
  // Singleton default (all-fields-unset) instance shared by every caller.
  private static final com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest();
  }

  public static com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser: deserializes a message by replaying the stream into a fresh
  // Builder. On any failure the partially-built message is attached to the
  // thrown InvalidProtocolBufferException so callers can inspect what parsed.
  private static final com.google.protobuf.Parser<UpdateEmailPreferencesRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateEmailPreferencesRequest>() {
        @java.lang.Override
        public UpdateEmailPreferencesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Normalize missing-required-field errors to the protobuf exception type.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared wire-format parser.
  public static com.google.protobuf.Parser<UpdateEmailPreferencesRequest> parser() {
    return PARSER;
  }
  // Instance-level accessor required by the Message interface; same singleton.
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateEmailPreferencesRequest> getParserForType() {
    return PARSER;
  }
  // Required by MessageLite; returns the shared all-defaults instance.
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.UpdateEmailPreferencesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,146 | java-dialogflow/google-cloud-dialogflow/src/main/java/com/google/cloud/dialogflow/v2/stub/HttpJsonConversationDatasetsStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2.stub;
import static com.google.cloud.dialogflow.v2.ConversationDatasetsClient.ListConversationDatasetsPagedResponse;
import static com.google.cloud.dialogflow.v2.ConversationDatasetsClient.ListLocationsPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.v2.ConversationDataset;
import com.google.cloud.dialogflow.v2.CreateConversationDatasetOperationMetadata;
import com.google.cloud.dialogflow.v2.CreateConversationDatasetRequest;
import com.google.cloud.dialogflow.v2.DeleteConversationDatasetOperationMetadata;
import com.google.cloud.dialogflow.v2.DeleteConversationDatasetRequest;
import com.google.cloud.dialogflow.v2.GetConversationDatasetRequest;
import com.google.cloud.dialogflow.v2.ImportConversationDataOperationMetadata;
import com.google.cloud.dialogflow.v2.ImportConversationDataOperationResponse;
import com.google.cloud.dialogflow.v2.ImportConversationDataRequest;
import com.google.cloud.dialogflow.v2.ListConversationDatasetsRequest;
import com.google.cloud.dialogflow.v2.ListConversationDatasetsResponse;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * REST stub implementation for the ConversationDatasets service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 *
 * <p>Each RPC is described by a static {@code ApiMethodDescriptor} that maps the proto request to
 * an HTTP/JSON call (URL path template, query parameters, request body) and parses the JSON
 * response back into a proto. Long-running methods additionally wire an operation-snapshot factory
 * and are polled through an embedded {@code HttpJsonOperationsStub}.
 */
@Generated("by gapic-generator-java")
public class HttpJsonConversationDatasetsStub extends ConversationDatasetsStub {
  // Message types that may appear packed inside google.protobuf.Any fields of
  // Operation responses/metadata; required for JSON (de)serialization of LROs.
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder()
          .add(ImportConversationDataOperationResponse.getDescriptor())
          .add(ConversationDataset.getDescriptor())
          .add(Empty.getDescriptor())
          .add(DeleteConversationDatasetOperationMetadata.getDescriptor())
          .add(CreateConversationDatasetOperationMetadata.getDescriptor())
          .add(ImportConversationDataOperationMetadata.getDescriptor())
          .build();

  // LRO: POST /v2/{parent=projects/*/locations/*}/conversationDatasets with the
  // dataset as the JSON request body.
  private static final ApiMethodDescriptor<CreateConversationDatasetRequest, Operation>
      createConversationDatasetMethodDescriptor =
          ApiMethodDescriptor.<CreateConversationDatasetRequest, Operation>newBuilder()
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationDatasets/CreateConversationDataset")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreateConversationDatasetRequest>newBuilder()
                      .setPath(
                          "/v2/{parent=projects/*/locations/*}/conversationDatasets",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreateConversationDatasetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreateConversationDatasetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody(
                                      "conversationDataset",
                                      request.getConversationDataset(),
                                      true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (CreateConversationDatasetRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // GET /v2/{name=projects/*/conversationDatasets/*}; an additional binding
  // covers the location-scoped resource form.
  private static final ApiMethodDescriptor<GetConversationDatasetRequest, ConversationDataset>
      getConversationDatasetMethodDescriptor =
          ApiMethodDescriptor.<GetConversationDatasetRequest, ConversationDataset>newBuilder()
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationDatasets/GetConversationDataset")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetConversationDatasetRequest>newBuilder()
                      .setPath(
                          "/v2/{name=projects/*/conversationDatasets/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetConversationDatasetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v2/{name=projects/*/locations/*/conversationDatasets/*}")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetConversationDatasetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ConversationDataset>newBuilder()
                      .setDefaultInstance(ConversationDataset.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // Paged GET over conversation datasets; pageSize/pageToken travel as query
  // parameters.
  private static final ApiMethodDescriptor<
          ListConversationDatasetsRequest, ListConversationDatasetsResponse>
      listConversationDatasetsMethodDescriptor =
          ApiMethodDescriptor
              .<ListConversationDatasetsRequest, ListConversationDatasetsResponse>newBuilder()
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationDatasets/ListConversationDatasets")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListConversationDatasetsRequest>newBuilder()
                      .setPath(
                          "/v2/{parent=projects/*}/conversationDatasets",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListConversationDatasetsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v2/{parent=projects/*/locations/*}/conversationDatasets")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListConversationDatasetsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListConversationDatasetsResponse>newBuilder()
                      .setDefaultInstance(ListConversationDatasetsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // LRO: DELETE /v2/{name=projects/*/locations/*/conversationDatasets/*}.
  private static final ApiMethodDescriptor<DeleteConversationDatasetRequest, Operation>
      deleteConversationDatasetMethodDescriptor =
          ApiMethodDescriptor.<DeleteConversationDatasetRequest, Operation>newBuilder()
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationDatasets/DeleteConversationDataset")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteConversationDatasetRequest>newBuilder()
                      .setPath(
                          "/v2/{name=projects/*/locations/*/conversationDatasets/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteConversationDatasetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteConversationDatasetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (DeleteConversationDatasetRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // LRO custom method: POST ...:importConversationData. The "name" field is
  // already carried in the URL path, so it is cleared from the "*" JSON body.
  private static final ApiMethodDescriptor<ImportConversationDataRequest, Operation>
      importConversationDataMethodDescriptor =
          ApiMethodDescriptor.<ImportConversationDataRequest, Operation>newBuilder()
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationDatasets/ImportConversationData")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ImportConversationDataRequest>newBuilder()
                      .setPath(
                          "/v2/{name=projects/*/conversationDatasets/*}:importConversationData",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ImportConversationDataRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v2/{name=projects/*/locations/*/conversationDatasets/*}:importConversationData")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ImportConversationDataRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearName().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (ImportConversationDataRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // Locations mixin: GET /v2/{name=projects/*}/locations.
  private static final ApiMethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          ApiMethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListLocationsRequest>newBuilder()
                      .setPath(
                          "/v2/{name=projects/*}/locations",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListLocationsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListLocationsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListLocationsResponse>newBuilder()
                      .setDefaultInstance(ListLocationsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // Locations mixin: GET /v2/{name=projects/*/locations/*}.
  private static final ApiMethodDescriptor<GetLocationRequest, Location>
      getLocationMethodDescriptor =
          ApiMethodDescriptor.<GetLocationRequest, Location>newBuilder()
              .setFullMethodName("google.cloud.location.Locations/GetLocation")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetLocationRequest>newBuilder()
                      .setPath(
                          "/v2/{name=projects/*/locations/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetLocationRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetLocationRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Location>newBuilder()
                      .setDefaultInstance(Location.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // Per-RPC callables, created once in the constructor and returned by the
  // accessor overrides below.
  private final UnaryCallable<CreateConversationDatasetRequest, Operation>
      createConversationDatasetCallable;
  private final OperationCallable<
          CreateConversationDatasetRequest,
          ConversationDataset,
          CreateConversationDatasetOperationMetadata>
      createConversationDatasetOperationCallable;
  private final UnaryCallable<GetConversationDatasetRequest, ConversationDataset>
      getConversationDatasetCallable;
  private final UnaryCallable<ListConversationDatasetsRequest, ListConversationDatasetsResponse>
      listConversationDatasetsCallable;
  private final UnaryCallable<
          ListConversationDatasetsRequest, ListConversationDatasetsPagedResponse>
      listConversationDatasetsPagedCallable;
  private final UnaryCallable<DeleteConversationDatasetRequest, Operation>
      deleteConversationDatasetCallable;
  private final OperationCallable<
          DeleteConversationDatasetRequest, Empty, DeleteConversationDatasetOperationMetadata>
      deleteConversationDatasetOperationCallable;
  private final UnaryCallable<ImportConversationDataRequest, Operation>
      importConversationDataCallable;
  private final OperationCallable<
          ImportConversationDataRequest,
          ImportConversationDataOperationResponse,
          ImportConversationDataOperationMetadata>
      importConversationDataOperationCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;

  private final BackgroundResource backgroundResources;
  private final HttpJsonOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;

  public static final HttpJsonConversationDatasetsStub create(
      ConversationDatasetsStubSettings settings) throws IOException {
    return new HttpJsonConversationDatasetsStub(settings, ClientContext.create(settings));
  }

  public static final HttpJsonConversationDatasetsStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonConversationDatasetsStub(
        ConversationDatasetsStubSettings.newHttpJsonBuilder().build(), clientContext);
  }

  public static final HttpJsonConversationDatasetsStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonConversationDatasetsStub(
        ConversationDatasetsStubSettings.newHttpJsonBuilder().build(),
        clientContext,
        callableFactory);
  }

  /**
   * Constructs an instance of HttpJsonConversationDatasetsStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonConversationDatasetsStub(
      ConversationDatasetsStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new HttpJsonConversationDatasetsCallableFactory());
  }

  /**
   * Constructs an instance of HttpJsonConversationDatasetsStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonConversationDatasetsStub(
      ConversationDatasetsStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Operations mixin stub used for polling/cancelling LROs. Service-specific
    // HTTP bindings are supplied because the generic google.longrunning paths
    // do not exist on the Dialogflow REST surface.
    this.httpJsonOperationsStub =
        HttpJsonOperationsStub.create(
            clientContext,
            callableFactory,
            typeRegistry,
            ImmutableMap.<String, HttpRule>builder()
                .put(
                    "google.longrunning.Operations.CancelOperation",
                    HttpRule.newBuilder()
                        .setPost("/v2/{name=projects/*/operations/*}:cancel")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setPost("/v2/{name=projects/*/locations/*/operations/*}:cancel")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.GetOperation",
                    HttpRule.newBuilder()
                        .setGet("/v2/{name=projects/*/operations/*}")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v2/{name=projects/*/locations/*/operations/*}")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.ListOperations",
                    HttpRule.newBuilder()
                        .setGet("/v2/{name=projects/*}/operations")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v2/{name=projects/*/locations/*}/operations")
                                .build())
                        .build())
                .build());

    // Per-method transport settings. Each params extractor populates the
    // x-goog-request-params routing header from the request's resource field.
    HttpJsonCallSettings<CreateConversationDatasetRequest, Operation>
        createConversationDatasetTransportSettings =
            HttpJsonCallSettings.<CreateConversationDatasetRequest, Operation>newBuilder()
                .setMethodDescriptor(createConversationDatasetMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<GetConversationDatasetRequest, ConversationDataset>
        getConversationDatasetTransportSettings =
            HttpJsonCallSettings.<GetConversationDatasetRequest, ConversationDataset>newBuilder()
                .setMethodDescriptor(getConversationDatasetMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<ListConversationDatasetsRequest, ListConversationDatasetsResponse>
        listConversationDatasetsTransportSettings =
            HttpJsonCallSettings
                .<ListConversationDatasetsRequest, ListConversationDatasetsResponse>newBuilder()
                .setMethodDescriptor(listConversationDatasetsMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<DeleteConversationDatasetRequest, Operation>
        deleteConversationDatasetTransportSettings =
            HttpJsonCallSettings.<DeleteConversationDatasetRequest, Operation>newBuilder()
                .setMethodDescriptor(deleteConversationDatasetMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<ImportConversationDataRequest, Operation>
        importConversationDataTransportSettings =
            HttpJsonCallSettings.<ImportConversationDataRequest, Operation>newBuilder()
                .setMethodDescriptor(importConversationDataMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<ListLocationsRequest, ListLocationsResponse>
        listLocationsTransportSettings =
            HttpJsonCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
                .setMethodDescriptor(listLocationsMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        HttpJsonCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();

    // Assemble the callables: unary + operation variants for LRO methods, and
    // paged variants for list methods (both built over the same transport).
    this.createConversationDatasetCallable =
        callableFactory.createUnaryCallable(
            createConversationDatasetTransportSettings,
            settings.createConversationDatasetSettings(),
            clientContext);
    this.createConversationDatasetOperationCallable =
        callableFactory.createOperationCallable(
            createConversationDatasetTransportSettings,
            settings.createConversationDatasetOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getConversationDatasetCallable =
        callableFactory.createUnaryCallable(
            getConversationDatasetTransportSettings,
            settings.getConversationDatasetSettings(),
            clientContext);
    this.listConversationDatasetsCallable =
        callableFactory.createUnaryCallable(
            listConversationDatasetsTransportSettings,
            settings.listConversationDatasetsSettings(),
            clientContext);
    this.listConversationDatasetsPagedCallable =
        callableFactory.createPagedCallable(
            listConversationDatasetsTransportSettings,
            settings.listConversationDatasetsSettings(),
            clientContext);
    this.deleteConversationDatasetCallable =
        callableFactory.createUnaryCallable(
            deleteConversationDatasetTransportSettings,
            settings.deleteConversationDatasetSettings(),
            clientContext);
    this.deleteConversationDatasetOperationCallable =
        callableFactory.createOperationCallable(
            deleteConversationDatasetTransportSettings,
            settings.deleteConversationDatasetOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.importConversationDataCallable =
        callableFactory.createUnaryCallable(
            importConversationDataTransportSettings,
            settings.importConversationDataSettings(),
            clientContext);
    this.importConversationDataOperationCallable =
        callableFactory.createOperationCallable(
            importConversationDataTransportSettings,
            settings.importConversationDataOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);

    // Aggregate everything that needs orderly shutdown (channels, executors).
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  @InternalApi
  public static List<ApiMethodDescriptor> getMethodDescriptors() {
    List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
    methodDescriptors.add(createConversationDatasetMethodDescriptor);
    methodDescriptors.add(getConversationDatasetMethodDescriptor);
    methodDescriptors.add(listConversationDatasetsMethodDescriptor);
    methodDescriptors.add(deleteConversationDatasetMethodDescriptor);
    methodDescriptors.add(importConversationDataMethodDescriptor);
    methodDescriptors.add(listLocationsMethodDescriptor);
    methodDescriptors.add(getLocationMethodDescriptor);
    return methodDescriptors;
  }

  public HttpJsonOperationsStub getHttpJsonOperationsStub() {
    return httpJsonOperationsStub;
  }

  @Override
  public UnaryCallable<CreateConversationDatasetRequest, Operation>
      createConversationDatasetCallable() {
    return createConversationDatasetCallable;
  }

  @Override
  public OperationCallable<
          CreateConversationDatasetRequest,
          ConversationDataset,
          CreateConversationDatasetOperationMetadata>
      createConversationDatasetOperationCallable() {
    return createConversationDatasetOperationCallable;
  }

  @Override
  public UnaryCallable<GetConversationDatasetRequest, ConversationDataset>
      getConversationDatasetCallable() {
    return getConversationDatasetCallable;
  }

  @Override
  public UnaryCallable<ListConversationDatasetsRequest, ListConversationDatasetsResponse>
      listConversationDatasetsCallable() {
    return listConversationDatasetsCallable;
  }

  @Override
  public UnaryCallable<ListConversationDatasetsRequest, ListConversationDatasetsPagedResponse>
      listConversationDatasetsPagedCallable() {
    return listConversationDatasetsPagedCallable;
  }

  @Override
  public UnaryCallable<DeleteConversationDatasetRequest, Operation>
      deleteConversationDatasetCallable() {
    return deleteConversationDatasetCallable;
  }

  @Override
  public OperationCallable<
          DeleteConversationDatasetRequest, Empty, DeleteConversationDatasetOperationMetadata>
      deleteConversationDatasetOperationCallable() {
    return deleteConversationDatasetOperationCallable;
  }

  @Override
  public UnaryCallable<ImportConversationDataRequest, Operation> importConversationDataCallable() {
    return importConversationDataCallable;
  }

  @Override
  public OperationCallable<
          ImportConversationDataRequest,
          ImportConversationDataOperationResponse,
          ImportConversationDataOperationMetadata>
      importConversationDataOperationCallable() {
    return importConversationDataOperationCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }

  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked exceptions unchanged.
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
apache/jackrabbit-oak | 35,799 | oak-lucene/src/main/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsReader.java | /*
* COPIED FROM APACHE LUCENE 4.7.2
*
* Git URL: git@github.com:apache/lucene.git, tag: releases/lucene-solr/4.7.2, path: lucene/core/src/java
*
* (see https://issues.apache.org/jira/browse/OAK-10786 for details)
*/
package org.apache.lucene.codecs.lucene40;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.TermState;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
/**
* Concrete class that reads the 4.0 frq/prox
* postings format.
*
* @see Lucene40PostingsFormat
* @deprecated Only for reading old 4.0 segments */
@Deprecated
public class Lucene40PostingsReader extends PostingsReaderBase {
final static String TERMS_CODEC = "Lucene40PostingsWriterTerms";
final static String FRQ_CODEC = "Lucene40PostingsWriterFrq";
final static String PRX_CODEC = "Lucene40PostingsWriterPrx";
//private static boolean DEBUG = BlockTreeTermsWriter.DEBUG;
// Increment version to change it:
final static int VERSION_START = 0;
final static int VERSION_LONG_SKIP = 1;
final static int VERSION_CURRENT = VERSION_LONG_SKIP;
private final IndexInput freqIn;
private final IndexInput proxIn;
// public static boolean DEBUG = BlockTreeTermsWriter.DEBUG;
int skipInterval;
int maxSkipLevels;
int skipMinimum;
// private String segment;
  /**
   * Sole constructor. Opens the segment's .frq (doc/freq) stream and, only if some field in the
   * segment indexes positions, its .prx (positions) stream, verifying each file's codec header.
   *
   * @throws IOException if a postings file cannot be opened or its header check fails
   */
  public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, String segmentSuffix) throws IOException {
    boolean success = false;
    IndexInput freqIn = null;
    IndexInput proxIn = null;
    try {
      freqIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION),
                             ioContext);
      CodecUtil.checkHeader(freqIn, FRQ_CODEC, VERSION_START, VERSION_CURRENT);
      // TODO: hasProx should (somehow!) become codec private,
      // but it's tricky because 1) FIS.hasProx is global (it
      // could be all fields that have prox are written by a
      // different codec), 2) the field may have had prox in
      // the past but all docs w/ that field were deleted.
      // Really we'd need to init prxOut lazily on write, and
      // then somewhere record that we actually wrote it so we
      // know whether to open on read:
      if (fieldInfos.hasProx()) {
        proxIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION),
                               ioContext);
        CodecUtil.checkHeader(proxIn, PRX_CODEC, VERSION_START, VERSION_CURRENT);
      } else {
        proxIn = null;
      }
      this.freqIn = freqIn;
      this.proxIn = proxIn;
      success = true;
    } finally {
      // On any failure above, close whatever was opened so the constructor does not leak handles.
      if (!success) {
        IOUtils.closeWhileHandlingException(freqIn, proxIn);
      }
    }
  }
  /**
   * Reads the terms-dictionary header written by the matching writer and caches the skip-list
   * parameters: interval, maximum levels, and the minimum docFreq for which skip data exists.
   */
  @Override
  public void init(IndexInput termsIn) throws IOException {
    // Make sure we are talking to the matching past writer
    CodecUtil.checkHeader(termsIn, TERMS_CODEC, VERSION_START, VERSION_CURRENT);
    skipInterval = termsIn.readInt();
    maxSkipLevels = termsIn.readInt();
    skipMinimum = termsIn.readInt();
  }
  // Must keep final because we do non-standard clone
  /**
   * Per-term state: the file offsets of this term's postings in the .frq and .prx streams, plus
   * the offset of its skip data (relative to freqOffset) when the term has enough docs.
   */
  private final static class StandardTermState extends BlockTermState {
    long freqOffset; // absolute start of this term's doc/freq data in the .frq stream
    long proxOffset; // absolute start of this term's positions in the .prx stream
    long skipOffset; // delta from freqOffset to skip data; undefined when docFreq < skipMinimum
    @Override
    public StandardTermState clone() {
      StandardTermState other = new StandardTermState();
      other.copyFrom(this);
      return other;
    }
    @Override
    public void copyFrom(TermState _other) {
      super.copyFrom(_other);
      StandardTermState other = (StandardTermState) _other;
      freqOffset = other.freqOffset;
      proxOffset = other.proxOffset;
      skipOffset = other.skipOffset;
    }
    @Override
    public String toString() {
      return super.toString() + " freqFP=" + freqOffset + " proxFP=" + proxOffset + " skipOffset=" + skipOffset;
    }
  }
  /** Allocates an empty {@link StandardTermState} for the terms dictionary to fill via decodeTerm. */
  @Override
  public BlockTermState newTermState() {
    return new StandardTermState();
  }
  /**
   * Closes the .frq and .prx inputs. The try/finally guarantees proxIn is closed even when
   * closing freqIn throws.
   */
  @Override
  public void close() throws IOException {
    try {
      if (freqIn != null) {
        freqIn.close();
      }
    } finally {
      if (proxIn != null) {
        proxIn.close();
      }
    }
  }
  /**
   * Decodes the metadata of the next term in a terms-dict block into {@code termState}.
   * Freq/prox pointers are delta-coded between consecutive terms; when {@code absolute} is true
   * the accumulators are reset first so the stored VLongs act as absolute file pointers.
   */
  @Override
  public void decodeTerm(long[] longs, DataInput in, FieldInfo fieldInfo, BlockTermState _termState, boolean absolute)
    throws IOException {
    final StandardTermState termState = (StandardTermState) _termState;
    // if (DEBUG) System.out.println("SPR: nextTerm seg=" + segment + " tbOrd=" + termState.termBlockOrd + " bytesReader.fp=" + termState.bytesReader.getPosition());
    // NOTE: isFirstTerm is unused here; kept unchanged to match upstream Lucene 4.7.2 verbatim.
    final boolean isFirstTerm = termState.termBlockOrd == 0;
    if (absolute) {
      termState.freqOffset = 0;
      termState.proxOffset = 0;
    }
    termState.freqOffset += in.readVLong();
    /*
    if (DEBUG) {
      System.out.println(" dF=" + termState.docFreq);
      System.out.println(" freqFP=" + termState.freqOffset);
    }
    */
    assert termState.freqOffset < freqIn.length();
    if (termState.docFreq >= skipMinimum) {
      // Skip data is only written for terms with enough docs; its offset is relative to freqOffset.
      termState.skipOffset = in.readVLong();
      // if (DEBUG) System.out.println(" skipOffset=" + termState.skipOffset + " vs freqIn.length=" + freqIn.length());
      assert termState.freqOffset + termState.skipOffset < freqIn.length();
    } else {
      // undefined
    }
    if (fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) {
      // A prox pointer exists only for fields indexed with positions.
      termState.proxOffset += in.readVLong();
      // if (DEBUG) System.out.println(" proxFP=" + termState.proxOffset);
    }
  }
  /**
   * Returns a DocsEnum over this term's postings, reusing {@code reuse} only when it originated
   * from this reader and was created for the same liveDocs instance.
   */
  @Override
  public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
    if (canReuse(reuse, liveDocs)) {
      // if (DEBUG) System.out.println("SPR.docs ts=" + termState);
      return ((SegmentDocsEnumBase) reuse).reset(fieldInfo, (StandardTermState)termState);
    }
    return newDocsEnum(liveDocs, fieldInfo, (StandardTermState)termState);
  }
  /**
   * An enum may be reused only when it was produced by this reader (same underlying .frq input)
   * and was built for the exact same liveDocs instance, since the live/all-docs variants differ.
   */
  private boolean canReuse(DocsEnum reuse, Bits liveDocs) {
    if (reuse != null && (reuse instanceof SegmentDocsEnumBase)) {
      SegmentDocsEnumBase docsEnum = (SegmentDocsEnumBase) reuse;
      // If you are using ParallelReader, and pass in a
      // reused DocsEnum, it could have come from another
      // reader also using standard codec
      if (docsEnum.startFreqIn == freqIn) {
        // we only reuse if the incoming enum has the same liveDocs as the given liveDocs
        return liveDocs == docsEnum.liveDocs;
      }
    }
    return false;
  }
  /** Creates a fresh docs enum, using the deletion-aware variant only when liveDocs is present. */
  private DocsEnum newDocsEnum(Bits liveDocs, FieldInfo fieldInfo, StandardTermState termState) throws IOException {
    if (liveDocs == null) {
      return new AllDocsSegmentDocsEnum(freqIn).reset(fieldInfo, termState);
    } else {
      return new LiveDocsSegmentDocsEnum(freqIn, liveDocs).reset(fieldInfo, termState);
    }
  }
  /**
   * Returns a positions enum for this term. Fields with payloads or offsets need the "full"
   * decoder; plain-positions fields use the leaner variant. {@code reuse} is accepted only when
   * it is the matching type and came from this reader's .frq input.
   */
  @Override
  public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs,
                                               DocsAndPositionsEnum reuse, int flags)
    throws IOException {
    boolean hasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
    // TODO: can we optimize if FLAG_PAYLOADS / FLAG_OFFSETS
    // isn't passed?
    // TODO: refactor
    if (fieldInfo.hasPayloads() || hasOffsets) {
      SegmentFullPositionsEnum docsEnum;
      if (reuse == null || !(reuse instanceof SegmentFullPositionsEnum)) {
        docsEnum = new SegmentFullPositionsEnum(freqIn, proxIn);
      } else {
        docsEnum = (SegmentFullPositionsEnum) reuse;
        if (docsEnum.startFreqIn != freqIn) {
          // If you are using ParallelReader, and pass in a
          // reused DocsEnum, it could have come from another
          // reader also using standard codec
          docsEnum = new SegmentFullPositionsEnum(freqIn, proxIn);
        }
      }
      return docsEnum.reset(fieldInfo, (StandardTermState) termState, liveDocs);
    } else {
      SegmentDocsAndPositionsEnum docsEnum;
      if (reuse == null || !(reuse instanceof SegmentDocsAndPositionsEnum)) {
        docsEnum = new SegmentDocsAndPositionsEnum(freqIn, proxIn);
      } else {
        docsEnum = (SegmentDocsAndPositionsEnum) reuse;
        if (docsEnum.startFreqIn != freqIn) {
          // If you are using ParallelReader, and pass in a
          // reused DocsEnum, it could have come from another
          // reader also using standard codec
          docsEnum = new SegmentDocsAndPositionsEnum(freqIn, proxIn);
        }
      }
      return docsEnum.reset(fieldInfo, (StandardTermState) termState, liveDocs);
    }
  }
static final int BUFFERSIZE = 64;
  /**
   * Base docs(+freqs) enum. Bulk-decodes the .frq stream into small parallel doc/freq buffers
   * (BUFFERSIZE entries at a time) and serves nextDoc()/advance() from those buffers; advance()
   * may first jump via the multi-level skip list when the target is far enough ahead.
   */
  private abstract class SegmentDocsEnumBase extends DocsEnum {
    protected final int[] docs = new int[BUFFERSIZE];
    protected final int[] freqs = new int[BUFFERSIZE];
    final IndexInput freqIn; // reuse
    final IndexInput startFreqIn; // reuse
    Lucene40SkipListReader skipper; // reuse - lazy loaded
    protected boolean indexOmitsTF; // does current field omit term freq?
    protected boolean storePayloads; // does current field store payloads?
    protected boolean storeOffsets; // does current field store offsets?
    protected int limit; // number of docs in this posting
    protected int ord; // how many docs we've read
    protected int doc; // doc we last read
    protected int accum; // accumulator for doc deltas
    protected int freq; // freq we last read
    protected int maxBufferedDocId; // largest docID currently buffered; -1 before first refill
    protected int start; // buffer index of the last doc returned
    protected int count; // number of valid entries in the buffer
    protected long freqOffset; // absolute .frq offset of this term's postings
    protected long skipOffset; // skip-data offset relative to freqOffset
    protected boolean skipped; // whether skip data has been loaded since reset()
    protected final Bits liveDocs;
    SegmentDocsEnumBase(IndexInput startFreqIn, Bits liveDocs) {
      this.startFreqIn = startFreqIn;
      this.freqIn = startFreqIn.clone();
      this.liveDocs = liveDocs;
    }
    /** Re-positions this enum at the start of the given term's postings. */
    DocsEnum reset(FieldInfo fieldInfo, StandardTermState termState) throws IOException {
      indexOmitsTF = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY;
      storePayloads = fieldInfo.hasPayloads();
      storeOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
      freqOffset = termState.freqOffset;
      skipOffset = termState.skipOffset;
      // TODO: for full enum case (eg segment merging) this
      // seek is unnecessary; maybe we can avoid in such
      // cases
      freqIn.seek(termState.freqOffset);
      limit = termState.docFreq;
      assert limit > 0;
      ord = 0;
      doc = -1;
      accum = 0;
      // if (DEBUG) System.out.println(" sde limit=" + limit + " freqFP=" + freqOffset);
      skipped = false;
      start = -1;
      count = 0;
      freq = 1;
      if (indexOmitsTF) {
        // With term freqs omitted every freq is implicitly 1; pre-fill once so refill can skip freqs.
        Arrays.fill(freqs, 1);
      }
      maxBufferedDocId = -1;
      return this;
    }
    @Override
    public final int freq() {
      return freq;
    }
    @Override
    public final int docID() {
      return doc;
    }
    @Override
    public final int advance(int target) throws IOException {
      // last doc in our buffer is >= target, binary search + next()
      if (++start < count && maxBufferedDocId >= target) {
        if ((count-start) > 32) { // 32 seemed to be a sweetspot here so use binsearch if the pending results are a lot
          start = binarySearch(count - 1, start, target, docs);
          return nextDoc();
        } else {
          return linearScan(target);
        }
      }
      start = count; // buffer is consumed
      return doc = skipTo(target);
    }
    /**
     * Binary search within the buffered docs; returns the index just BEFORE the first doc >= target
     * so that the follow-up nextDoc() lands on it.
     */
    private final int binarySearch(int hi, int low, int target, int[] docs) {
      while (low <= hi) {
        int mid = (hi + low) >>> 1;
        int doc = docs[mid];
        if (doc < target) {
          low = mid + 1;
        } else if (doc > target) {
          hi = mid - 1;
        } else {
          low = mid;
          break;
        }
      }
      return low-1;
    }
    /** Decodes a freq from the combined doc/freq code: a set low bit means freq == 1. */
    final int readFreq(final IndexInput freqIn, final int code)
      throws IOException {
      if ((code & 1) != 0) { // if low bit is set
        return 1; // freq is one
      } else {
        return freqIn.readVInt(); // else read freq
      }
    }
    // Scans the buffered docs for the first (live) doc >= scanTo; refills when the buffer runs out.
    protected abstract int linearScan(int scanTo) throws IOException;
    // Reads forward in the .frq stream until a (live) doc >= target is found.
    protected abstract int scanTo(int target) throws IOException;
    /** Returns the next unread doc, then bulk-refills the doc/freq buffers from the stream. */
    protected final int refill() throws IOException {
      final int doc = nextUnreadDoc();
      count = 0;
      start = -1;
      if (doc == NO_MORE_DOCS) {
        return NO_MORE_DOCS;
      }
      final int numDocs = Math.min(docs.length, limit - ord);
      ord += numDocs;
      if (indexOmitsTF) {
        count = fillDocs(numDocs);
      } else {
        count = fillDocsAndFreqs(numDocs);
      }
      maxBufferedDocId = count > 0 ? docs[count-1] : NO_MORE_DOCS;
      return doc;
    }
    // Returns the next doc read directly from the stream (subclasses apply liveDocs filtering).
    protected abstract int nextUnreadDoc() throws IOException;
    /** Bulk-decodes {@code size} doc deltas (freq-omitting fields) into the buffer. */
    private final int fillDocs(int size) throws IOException {
      final IndexInput freqIn = this.freqIn;
      final int docs[] = this.docs;
      int docAc = accum;
      for (int i = 0; i < size; i++) {
        docAc += freqIn.readVInt();
        docs[i] = docAc;
      }
      accum = docAc;
      return size;
    }
    /** Bulk-decodes {@code size} doc/freq pairs into the parallel buffers. */
    private final int fillDocsAndFreqs(int size) throws IOException {
      final IndexInput freqIn = this.freqIn;
      final int docs[] = this.docs;
      final int freqs[] = this.freqs;
      int docAc = accum;
      for (int i = 0; i < size; i++) {
        final int code = freqIn.readVInt();
        docAc += code >>> 1; // shift off low bit
        freqs[i] = readFreq(freqIn, code);
        docs[i] = docAc;
      }
      accum = docAc;
      return size;
    }
    /** Jumps ahead via the skip list when available and worthwhile, then scans to target. */
    private final int skipTo(int target) throws IOException {
      if ((target - skipInterval) >= accum && limit >= skipMinimum) {
        // There are enough docs in the posting to have
        // skip data, and it isn't too close.
        if (skipper == null) {
          // This is the first time this enum has ever been used for skipping -- do lazy init
          skipper = new Lucene40SkipListReader(freqIn.clone(), maxSkipLevels, skipInterval);
        }
        if (!skipped) {
          // This is the first time this posting has
          // skipped since reset() was called, so now we
          // load the skip data for this posting
          skipper.init(freqOffset + skipOffset,
                       freqOffset, 0,
                       limit, storePayloads, storeOffsets);
          skipped = true;
        }
        final int newOrd = skipper.skipTo(target);
        if (newOrd > ord) {
          // Skipper moved
          ord = newOrd;
          accum = skipper.getDoc();
          freqIn.seek(skipper.getFreqPointer());
        }
      }
      return scanTo(target);
    }
    @Override
    public long cost() {
      return limit;
    }
  }
  /** Docs enum used when the segment has no deletions: every decoded doc is returned. */
  private final class AllDocsSegmentDocsEnum extends SegmentDocsEnumBase {
    AllDocsSegmentDocsEnum(IndexInput startFreqIn) {
      super(startFreqIn, null);
      assert liveDocs == null;
    }
    @Override
    public final int nextDoc() throws IOException {
      if (++start < count) {
        freq = freqs[start];
        return doc = docs[start];
      }
      // Buffer exhausted: read the next doc from the stream and refill the buffers.
      return doc = refill();
    }
    @Override
    protected final int linearScan(int scanTo) throws IOException {
      final int[] docs = this.docs;
      final int upTo = count;
      for (int i = start; i < upTo; i++) {
        final int d = docs[i];
        if (scanTo <= d) {
          start = i;
          freq = freqs[i];
          return doc = docs[i];
        }
      }
      return doc = refill();
    }
    @Override
    protected int scanTo(int target) throws IOException {
      int docAcc = accum;
      int frq = 1;
      final IndexInput freqIn = this.freqIn;
      final boolean omitTF = indexOmitsTF;
      final int loopLimit = limit;
      for (int i = ord; i < loopLimit; i++) {
        int code = freqIn.readVInt();
        if (omitTF) {
          docAcc += code;
        } else {
          docAcc += code >>> 1; // shift off low bit
          frq = readFreq(freqIn, code);
        }
        if (docAcc >= target) {
          freq = frq;
          ord = i + 1;
          return accum = docAcc;
        }
      }
      ord = limit;
      freq = frq;
      accum = docAcc;
      return NO_MORE_DOCS;
    }
    @Override
    protected final int nextUnreadDoc() throws IOException {
      if (ord++ < limit) {
        int code = freqIn.readVInt();
        if (indexOmitsTF) {
          accum += code;
        } else {
          accum += code >>> 1; // shift off low bit
          freq = readFreq(freqIn, code);
        }
        return accum;
      } else {
        return NO_MORE_DOCS;
      }
    }
  }
  /** Deletion-aware docs enum: only docs whose bit is set in {@code liveDocs} are surfaced. */
  private final class LiveDocsSegmentDocsEnum extends SegmentDocsEnumBase {
    LiveDocsSegmentDocsEnum(IndexInput startFreqIn, Bits liveDocs) {
      super(startFreqIn, liveDocs);
      assert liveDocs != null;
    }
    @Override
    public final int nextDoc() throws IOException {
      final Bits liveDocs = this.liveDocs;
      for (int i = start+1; i < count; i++) {
        int d = docs[i];
        if (liveDocs.get(d)) {
          start = i;
          freq = freqs[i];
          return doc = d;
        }
      }
      start = count;
      return doc = refill();
    }
    @Override
    protected final int linearScan(int scanTo) throws IOException {
      final int[] docs = this.docs;
      final int upTo = count;
      final Bits liveDocs = this.liveDocs;
      for (int i = start; i < upTo; i++) {
        int d = docs[i];
        if (scanTo <= d && liveDocs.get(d)) {
          start = i;
          freq = freqs[i];
          return doc = docs[i];
        }
      }
      return doc = refill();
    }
    @Override
    protected int scanTo(int target) throws IOException {
      int docAcc = accum;
      int frq = 1;
      final IndexInput freqIn = this.freqIn;
      final boolean omitTF = indexOmitsTF;
      final int loopLimit = limit;
      final Bits liveDocs = this.liveDocs;
      for (int i = ord; i < loopLimit; i++) {
        int code = freqIn.readVInt();
        if (omitTF) {
          docAcc += code;
        } else {
          docAcc += code >>> 1; // shift off low bit
          frq = readFreq(freqIn, code);
        }
        // Stop only on a doc that both reaches the target and is not deleted.
        if (docAcc >= target && liveDocs.get(docAcc)) {
          freq = frq;
          ord = i + 1;
          return accum = docAcc;
        }
      }
      ord = limit;
      freq = frq;
      accum = docAcc;
      return NO_MORE_DOCS;
    }
    @Override
    protected final int nextUnreadDoc() throws IOException {
      int docAcc = accum;
      int frq = 1;
      final IndexInput freqIn = this.freqIn;
      final boolean omitTF = indexOmitsTF;
      final int loopLimit = limit;
      final Bits liveDocs = this.liveDocs;
      for (int i = ord; i < loopLimit; i++) {
        int code = freqIn.readVInt();
        if (omitTF) {
          docAcc += code;
        } else {
          docAcc += code >>> 1; // shift off low bit
          frq = readFreq(freqIn, code);
        }
        if (liveDocs.get(docAcc)) {
          freq = frq;
          ord = i + 1;
          return accum = docAcc;
        }
      }
      ord = limit;
      freq = frq;
      accum = docAcc;
      return NO_MORE_DOCS;
    }
  }
  // TODO specialize DocsAndPosEnum too
  // Decodes docs & positions. payloads nor offsets are present.
  /**
   * Positions enum for fields indexed with positions but neither payloads nor offsets.
   * Docs/freqs come from a clone of the .frq stream; positions are decoded from a clone of the
   * .prx stream, which is seeked lazily on the first nextPosition() call.
   */
  private final class SegmentDocsAndPositionsEnum extends DocsAndPositionsEnum {
    final IndexInput startFreqIn;
    private final IndexInput freqIn;
    private final IndexInput proxIn;
    int limit; // number of docs in this posting
    int ord; // how many docs we've read
    int doc = -1; // doc we last read
    int accum; // accumulator for doc deltas
    int freq; // freq we last read
    int position;
    Bits liveDocs;
    long freqOffset;
    long skipOffset;
    long proxOffset;
    int posPendingCount; // positions not yet consumed for the docs iterated so far
    boolean skipped;
    Lucene40SkipListReader skipper;
    private long lazyProxPointer; // pending .prx seek target; -1 once the seek has been performed
    public SegmentDocsAndPositionsEnum(IndexInput freqIn, IndexInput proxIn) {
      startFreqIn = freqIn;
      this.freqIn = freqIn.clone();
      this.proxIn = proxIn.clone();
    }
    /** Re-positions this enum at the start of the given term's postings. */
    public SegmentDocsAndPositionsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits liveDocs) throws IOException {
      assert fieldInfo.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
      assert !fieldInfo.hasPayloads();
      this.liveDocs = liveDocs;
      // TODO: for full enum case (eg segment merging) this
      // seek is unnecessary; maybe we can avoid in such
      // cases
      freqIn.seek(termState.freqOffset);
      lazyProxPointer = termState.proxOffset;
      limit = termState.docFreq;
      assert limit > 0;
      ord = 0;
      doc = -1;
      accum = 0;
      position = 0;
      skipped = false;
      posPendingCount = 0;
      freqOffset = termState.freqOffset;
      proxOffset = termState.proxOffset;
      skipOffset = termState.skipOffset;
      // if (DEBUG) System.out.println("StandardR.D&PE reset seg=" + segment + " limit=" + limit + " freqFP=" + freqOffset + " proxFP=" + proxOffset);
      return this;
    }
    @Override
    public int nextDoc() throws IOException {
      // if (DEBUG) System.out.println("SPR.nextDoc seg=" + segment + " freqIn.fp=" + freqIn.getFilePointer());
      while(true) {
        if (ord == limit) {
          // if (DEBUG) System.out.println(" return END");
          return doc = NO_MORE_DOCS;
        }
        ord++;
        // Decode next doc/freq pair
        final int code = freqIn.readVInt();
        accum += code >>> 1; // shift off low bit
        if ((code & 1) != 0) { // if low bit is set
          freq = 1; // freq is one
        } else {
          freq = freqIn.readVInt(); // else read freq
        }
        // Count positions even for skipped (deleted) docs so nextPosition() can fast-forward.
        posPendingCount += freq;
        if (liveDocs == null || liveDocs.get(accum)) {
          break;
        }
      }
      position = 0;
      // if (DEBUG) System.out.println(" return doc=" + doc);
      return (doc = accum);
    }
    @Override
    public int docID() {
      return doc;
    }
    @Override
    public int freq() {
      return freq;
    }
    /** Jumps ahead via the skip list when worthwhile, then linear-scans to the first doc >= target. */
    @Override
    public int advance(int target) throws IOException {
      //System.out.println("StandardR.D&PE advance target=" + target);
      if ((target - skipInterval) >= doc && limit >= skipMinimum) {
        // There are enough docs in the posting to have
        // skip data, and it isn't too close
        if (skipper == null) {
          // This is the first time this enum has ever been used for skipping -- do lazy init
          skipper = new Lucene40SkipListReader(freqIn.clone(), maxSkipLevels, skipInterval);
        }
        if (!skipped) {
          // This is the first time this posting has
          // skipped, since reset() was called, so now we
          // load the skip data for this posting
          skipper.init(freqOffset+skipOffset,
                       freqOffset, proxOffset,
                       limit, false, false);
          skipped = true;
        }
        final int newOrd = skipper.skipTo(target);
        if (newOrd > ord) {
          // Skipper moved
          ord = newOrd;
          doc = accum = skipper.getDoc();
          freqIn.seek(skipper.getFreqPointer());
          lazyProxPointer = skipper.getProxPointer();
          posPendingCount = 0;
          position = 0;
        }
      }
      // Now, linear scan for the rest:
      do {
        nextDoc();
      } while (target > doc);
      return doc;
    }
    @Override
    public int nextPosition() throws IOException {
      if (lazyProxPointer != -1) {
        proxIn.seek(lazyProxPointer);
        lazyProxPointer = -1;
      }
      // scan over any docs that were iterated without their positions
      if (posPendingCount > freq) {
        position = 0;
        while(posPendingCount != freq) {
          // Each position is one VInt; a byte with its high bit clear terminates a VInt.
          if ((proxIn.readByte() & 0x80) == 0) {
            posPendingCount--;
          }
        }
      }
      position += proxIn.readVInt();
      posPendingCount--;
      assert posPendingCount >= 0: "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount;
      return position;
    }
    /** Offsets are never indexed for this variant. */
    @Override
    public int startOffset() {
      return -1;
    }
    /** Offsets are never indexed for this variant. */
    @Override
    public int endOffset() {
      return -1;
    }
    /** Returns the payload at this position, or null if no
     * payload was indexed. */
    @Override
    public BytesRef getPayload() throws IOException {
      return null;
    }
    @Override
    public long cost() {
      return limit;
    }
  }
  // Decodes docs & positions & (payloads and/or offsets)
  /**
   * Positions enum for fields that store payloads and/or offsets alongside positions.
   * Tracks payload/offset lengths while decoding the .prx stream, deferring the actual payload
   * bytes read until getPayload() is called.
   */
  private class SegmentFullPositionsEnum extends DocsAndPositionsEnum {
    final IndexInput startFreqIn;
    private final IndexInput freqIn;
    private final IndexInput proxIn;
    int limit; // number of docs in this posting
    int ord; // how many docs we've read
    int doc = -1; // doc we last read
    int accum; // accumulator for doc deltas
    int freq; // freq we last read
    int position;
    Bits liveDocs;
    long freqOffset;
    long skipOffset;
    long proxOffset;
    int posPendingCount; // positions not yet consumed for the docs iterated so far
    int payloadLength; // length of the current payload (lengths are delta-shared across positions)
    boolean payloadPending; // true when the current position's payload bytes have not been read yet
    boolean skipped;
    Lucene40SkipListReader skipper;
    private BytesRef payload;
    private long lazyProxPointer; // pending .prx seek target; -1 once the seek has been performed
    boolean storePayloads;
    boolean storeOffsets;
    int offsetLength;
    int startOffset;
    public SegmentFullPositionsEnum(IndexInput freqIn, IndexInput proxIn) {
      startFreqIn = freqIn;
      this.freqIn = freqIn.clone();
      this.proxIn = proxIn.clone();
    }
    /** Re-positions this enum at the start of the given term's postings. */
    public SegmentFullPositionsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits liveDocs) throws IOException {
      storeOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
      storePayloads = fieldInfo.hasPayloads();
      assert fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
      assert storePayloads || storeOffsets;
      if (payload == null) {
        payload = new BytesRef();
        payload.bytes = new byte[1];
      }
      this.liveDocs = liveDocs;
      // TODO: for full enum case (eg segment merging) this
      // seek is unnecessary; maybe we can avoid in such
      // cases
      freqIn.seek(termState.freqOffset);
      lazyProxPointer = termState.proxOffset;
      limit = termState.docFreq;
      ord = 0;
      doc = -1;
      accum = 0;
      position = 0;
      startOffset = 0;
      skipped = false;
      posPendingCount = 0;
      payloadPending = false;
      freqOffset = termState.freqOffset;
      proxOffset = termState.proxOffset;
      skipOffset = termState.skipOffset;
      //System.out.println("StandardR.D&PE reset seg=" + segment + " limit=" + limit + " freqFP=" + freqOffset + " proxFP=" + proxOffset + " this=" + this);
      return this;
    }
    @Override
    public int nextDoc() throws IOException {
      while(true) {
        if (ord == limit) {
          //System.out.println("StandardR.D&PE seg=" + segment + " nextDoc return doc=END");
          return doc = NO_MORE_DOCS;
        }
        ord++;
        // Decode next doc/freq pair
        final int code = freqIn.readVInt();
        accum += code >>> 1; // shift off low bit
        if ((code & 1) != 0) { // if low bit is set
          freq = 1; // freq is one
        } else {
          freq = freqIn.readVInt(); // else read freq
        }
        // Count positions even for skipped (deleted) docs so nextPosition() can fast-forward.
        posPendingCount += freq;
        if (liveDocs == null || liveDocs.get(accum)) {
          break;
        }
      }
      position = 0;
      startOffset = 0;
      //System.out.println("StandardR.D&PE nextDoc seg=" + segment + " return doc=" + doc);
      return (doc = accum);
    }
    @Override
    public int docID() {
      return doc;
    }
    @Override
    public int freq() throws IOException {
      return freq;
    }
    /** Jumps ahead via the skip list when worthwhile, then linear-scans to the first doc >= target. */
    @Override
    public int advance(int target) throws IOException {
      //System.out.println("StandardR.D&PE advance seg=" + segment + " target=" + target + " this=" + this);
      if ((target - skipInterval) >= doc && limit >= skipMinimum) {
        // There are enough docs in the posting to have
        // skip data, and it isn't too close
        if (skipper == null) {
          // This is the first time this enum has ever been used for skipping -- do lazy init
          skipper = new Lucene40SkipListReader(freqIn.clone(), maxSkipLevels, skipInterval);
        }
        if (!skipped) {
          // This is the first time this posting has
          // skipped, since reset() was called, so now we
          // load the skip data for this posting
          //System.out.println("  init skipper freqOffset=" + freqOffset + " skipOffset=" + skipOffset + " vs len=" + freqIn.length());
          skipper.init(freqOffset+skipOffset,
                       freqOffset, proxOffset,
                       limit, storePayloads, storeOffsets);
          skipped = true;
        }
        final int newOrd = skipper.skipTo(target);
        if (newOrd > ord) {
          // Skipper moved
          ord = newOrd;
          doc = accum = skipper.getDoc();
          freqIn.seek(skipper.getFreqPointer());
          lazyProxPointer = skipper.getProxPointer();
          posPendingCount = 0;
          position = 0;
          startOffset = 0;
          payloadPending = false;
          // The skip entry records the payload/offset lengths in effect at the skip point.
          payloadLength = skipper.getPayloadLength();
          offsetLength = skipper.getOffsetLength();
        }
      }
      // Now, linear scan for the rest:
      do {
        nextDoc();
      } while (target > doc);
      return doc;
    }
    @Override
    public int nextPosition() throws IOException {
      if (lazyProxPointer != -1) {
        proxIn.seek(lazyProxPointer);
        lazyProxPointer = -1;
      }
      if (payloadPending && payloadLength > 0) {
        // payload of last position was never retrieved -- skip it
        proxIn.seek(proxIn.getFilePointer() + payloadLength);
        payloadPending = false;
      }
      // scan over any docs that were iterated without their positions
      while(posPendingCount > freq) {
        final int code = proxIn.readVInt();
        if (storePayloads) {
          if ((code & 1) != 0) {
            // new payload length
            payloadLength = proxIn.readVInt();
            assert payloadLength >= 0;
          }
          assert payloadLength != -1;
        }
        if (storeOffsets) {
          if ((proxIn.readVInt() & 1) != 0) {
            // new offset length
            offsetLength = proxIn.readVInt();
          }
        }
        if (storePayloads) {
          proxIn.seek(proxIn.getFilePointer() + payloadLength);
        }
        posPendingCount--;
        position = 0;
        startOffset = 0;
        payloadPending = false;
        //System.out.println("StandardR.D&PE skipPos");
      }
      // read next position
      if (payloadPending && payloadLength > 0) {
        // payload wasn't retrieved for last position
        proxIn.seek(proxIn.getFilePointer()+payloadLength);
      }
      int code = proxIn.readVInt();
      if (storePayloads) {
        if ((code & 1) != 0) {
          // new payload length
          payloadLength = proxIn.readVInt();
          assert payloadLength >= 0;
        }
        assert payloadLength != -1;
        payloadPending = true;
        code >>>= 1;
      }
      position += code;
      if (storeOffsets) {
        int offsetCode = proxIn.readVInt();
        if ((offsetCode & 1) != 0) {
          // new offset length
          offsetLength = proxIn.readVInt();
        }
        startOffset += offsetCode >>> 1;
      }
      posPendingCount--;
      assert posPendingCount >= 0: "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount;
      //System.out.println("StandardR.D&PE nextPos return pos=" + position);
      return position;
    }
    @Override
    public int startOffset() throws IOException {
      return storeOffsets ? startOffset : -1;
    }
    @Override
    public int endOffset() throws IOException {
      return storeOffsets ? startOffset + offsetLength : -1;
    }
    /** Returns the payload at this position, or null if no
     * payload was indexed. */
    @Override
    public BytesRef getPayload() throws IOException {
      if (storePayloads) {
        if (payloadLength <= 0) {
          return null;
        }
        assert lazyProxPointer == -1;
        assert posPendingCount < freq;
        if (payloadPending) {
          // Read (at most once per position) the deferred payload bytes into the shared BytesRef.
          if (payloadLength > payload.bytes.length) {
            payload.grow(payloadLength);
          }
          proxIn.readBytes(payload.bytes, 0, payloadLength);
          payload.length = payloadLength;
          payloadPending = false;
        }
        return payload;
      } else {
        return null;
      }
    }
    @Override
    public long cost() {
      return limit;
    }
  }
  /** Reports no accountable heap usage; this reader only holds stream handles and small scalars. */
  @Override
  public long ramBytesUsed() {
    return 0;
  }
}
|
apache/sis | 36,200 | endorsed/src/org.apache.sis.util/main/org/apache/sis/setup/About.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.setup;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.EnumSet;
import java.util.Map;
import java.util.LinkedHashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Date;
import java.util.TimeZone;
import java.util.MissingResourceException;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.logging.Level;
import java.util.logging.Handler;
import java.io.File;
import java.io.IOException;
import java.text.Format;
import java.text.DateFormat;
import java.text.FieldPosition;
import java.nio.file.Path;
import java.nio.charset.Charset;
import static java.lang.System.getProperty;
import static java.util.logging.Logger.getLogger;
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.CharSequences;
import org.apache.sis.util.Exceptions;
import org.apache.sis.util.Version;
import org.apache.sis.util.logging.Logging;
import org.apache.sis.util.resources.Messages;
import org.apache.sis.util.resources.Vocabulary;
import org.apache.sis.util.collection.TreeTable;
import org.apache.sis.util.collection.TreeTables;
import org.apache.sis.util.collection.DefaultTreeTable;
import org.apache.sis.util.internal.shared.MetadataServices;
import org.apache.sis.util.internal.shared.Constants;
import org.apache.sis.system.Loggers;
import org.apache.sis.system.Modules;
import org.apache.sis.system.Shutdown;
import org.apache.sis.system.DataDirectory;
import static org.apache.sis.util.collection.TableColumn.NAME;
import static org.apache.sis.util.collection.TableColumn.VALUE_AS_TEXT;
import static org.apache.sis.util.internal.shared.Constants.UTC;
/**
* Provides information about the Apache SIS running environment.
* This class collects information from various places like {@link Version#SIS},
* {@link System#getProperties()}, {@link Locale#getDefault()} or {@link TimeZone#getDefault()}.
* This class does not collect every possible information. Instead, it tries to focus on the most
* important information for SIS, as determined by experience in troubleshooting.
* Some of those information are:
*
* <ul>
 * <li>Version numbers (SIS, Java, Operating system).</li>
* <li>Default locale, timezone and character encoding.</li>
* <li>Current directory, user home and Java home.</li>
* <li>Libraries on the module path.</li>
* </ul>
*
* @author Martin Desruisseaux (Geomatys)
* @version 1.5
* @since 0.3
*/
public enum About {
    /**
     * Information about software version numbers.
     * This section includes:
     *
     * <ul>
     *   <li>Apache SIS version</li>
     *   <li>Java runtime version and vendor</li>
     *   <li>Operating system name and version</li>
     *   <li>EPSG geodetic dataset in use</li>
     * </ul>
     */
    VERSIONS(Vocabulary.Keys.Versions),
    /**
     * Information about default locale, timezone and character encoding.
     * This section includes:
     *
     * <ul>
     *   <li>Default locale, completed by ISO 3-letter codes</li>
     *   <li>Default timezone, completed by timezone offset</li>
     *   <li>Current date and time in the default timezone</li>
     *   <li>Default character encoding</li>
     * </ul>
     */
    LOCALIZATION(Vocabulary.Keys.Localization),
    /**
     * Information about available plugins.
     * This section includes:
     *
     * <ul>
     *   <li>List of data store implementations</li>
     * </ul>
     *
     * @since 0.8
     */
    PLUGINS(Vocabulary.Keys.Plugins),
    /**
     * Information about logging.
     */
    LOGGING(Vocabulary.Keys.Logging),
    /**
     * Information about user home directory, java installation directory or other kind of data.
     * This section includes:
     *
     * <ul>
     *   <li>User directory</li>
     *   <li>Default directory</li>
     *   <li>SIS data directory</li>
     *   <li>Temporary directory</li>
     *   <li>Java home directory</li>
     * </ul>
     */
    PATHS(Vocabulary.Keys.Paths),
    /**
     * Information about the libraries.
     * This section includes:
     *
     * <ul>
     *   <li>JAR files in the extension directories</li>
     *   <li>JAR files and directories in the application module path</li>
     * </ul>
     */
    LIBRARIES(Vocabulary.Keys.Libraries);
    /**
     * The resource key for this section in the {@link Vocabulary} resources bundle.
     */
    private final short resourceKey;
    /**
     * Creates a new section to be formatted using the given resource.
     *
     * @param resourceKey the {@link Vocabulary} key of the localized title of this section.
     */
    private About(final short resourceKey) {
        this.resourceKey = resourceKey;
    }
    /**
     * Returns all known information about the current Apache SIS running environment.
     * The information is formatted using the system default locale and timezone.
     *
     * <p>This convenience method is equivalent to the following code:</p>
     *
     * {@snippet lang="java" :
     * return configuration(EnumSet.allOf(About.class), null, null);
     * }
     *
     * @return configuration information, as a tree for grouping some configuration by sections.
     */
    public static TreeTable configuration() {
        return configuration(EnumSet.allOf(About.class), null, null);
    }
    /**
     * Returns a subset of the information about the current Apache SIS running environment.
     *
     * @param sections the sections for which information is desired.
     * @param locale the locale to use for formatting the texts in the tree, or {@code null} for the default.
     * @param timezone the timezone to use for formatting the dates, or {@code null} for the default.
     * @return configuration information, as a tree for grouping some configuration by sections.
     */
    public static TreeTable configuration(final Set<About> sections, Locale locale, final TimeZone timezone) {
        ArgumentChecks.ensureNonNull("sections", sections);
        // When no locale is given, texts use the DISPLAY locale while dates/numbers use the FORMAT locale.
        final Locale formatLocale;
        if (locale != null) {
            formatLocale = locale;
        } else {
            locale = Locale.getDefault(Locale.Category.DISPLAY);
            formatLocale = Locale.getDefault(Locale.Category.FORMAT);
        }
        String userHome = null;
        String javaHome = null;
        final Date now = new Date();
        final Vocabulary resources = Vocabulary.forLocale(locale);
        final DefaultTreeTable table = new DefaultTreeTable(NAME, VALUE_AS_TEXT);
        final TreeTable.Node root = table.getRoot();
        root.setValue(NAME, resources.getString(Vocabulary.Keys.LocalConfiguration));
        table.setRoot(root);
        /*
         * Begin with the "Versions" section. The `newSection` variable will be updated in the
         * switch statement when new section will begin, and reset to `null` after the `section`
         * variable has been updated accordingly.
         */
        TreeTable.Node section = null;
        About newSection = VERSIONS;
fill: for (int i=0; ; i++) {
            short nameKey = 0; // The Vocabulary.Key for `name`, used only if name is null.
            String name = null; // The value to put in the "Name" column of the table.
            Object value = null; // The value to put in the "Value" column of the table.
            String[] children = null; // Optional children to write below the node.
            // Each case computes one candidate row; rows whose `value` stays null are skipped below.
            switch (i) {
                case 0: {
                    if (sections.contains(VERSIONS)) {
                        name = "Apache SIS";
                        value = Version.SIS;
                    }
                    break;
                }
                case 1: {
                    if (sections.contains(VERSIONS)) {
                        name = "Java";
                        value = concatenate(getProperty("java.version"), getProperty("java.vendor"), true);
                    }
                    break;
                }
                case 2: {
                    if (sections.contains(VERSIONS)) {
                        name = "JavaFX";
                        value = getProperty("javafx.version");
                    }
                    break;
                }
                case 3: {
                    if (sections.contains(VERSIONS)) {
                        nameKey = Vocabulary.Keys.OperatingSystem;
                        value = concatenate(concatenate(getProperty("os.name"),
                                getProperty("os.version"), false), getProperty("os.arch"), true);
                    }
                    break;
                }
                case 4: {
                    if (sections.contains(VERSIONS)) {
                        nameKey = Vocabulary.Keys.Container;
                        value = Shutdown.getContainer(); // Sometimes contains version information.
                    }
                    break;
                }
                case 5: {
                    if (sections.contains(VERSIONS)) {
                        nameKey = Vocabulary.Keys.GeodeticDataset;
                        value = MetadataServices.getInstance().getInformation(Constants.EPSG, locale);
                    }
                    break;
                }
                case 6: {
                    newSection = LOCALIZATION;
                    if (sections.contains(LOCALIZATION)) {
                        final Locale current = Locale.getDefault();
                        if (current != null) {
                            nameKey = Vocabulary.Keys.Locale;
                            value = current.getDisplayName(locale);
                            // NOTE(review): ISO codes are taken from `locale` (the display locale), not from
                            // `current` (the default locale shown on this row) — confirm this is intentional.
                            final CharSequence code = concatenate(getCode(locale, false), getCode(locale, true), true);
                            if (code != null) {
                                children = new String[] {resources.getString(Vocabulary.Keys.Code_1, "ISO"), code.toString()};
                            }
                        }
                    }
                    break;
                }
                case 7: {
                    if (sections.contains(LOCALIZATION)) {
                        final TimeZone current = TimeZone.getDefault();
                        if (current != null) {
                            nameKey = Vocabulary.Keys.Timezone;
                            final boolean inDaylightTime = current.inDaylightTime(now);
                            value = concatenate(current.getDisplayName(inDaylightTime, TimeZone.LONG, locale), current.getID(), true);
                            // Formats the raw UTC offset, then (if daylight saving applies now) the DST delta.
                            final DateFormat df = DateFormat.getTimeInstance(DateFormat.SHORT, formatLocale);
                            df.setTimeZone(TimeZone.getTimeZone(UTC));
                            int offset = current.getOffset(now.getTime());
                            StringBuffer buffer = format(df, offset, new StringBuffer("UTC "));
                            offset -= current.getRawOffset();
                            if (offset != 0) {
                                buffer = format(df, offset, buffer.append(" (")
                                        .append(resources.getString(Vocabulary.Keys.DaylightTime)).append(' ')).append(')');
                            }
                            children = new String[] {resources.getString(Vocabulary.Keys.Offset), buffer.toString()};
                        }
                    }
                    break;
                }
                case 8: {
                    if (sections.contains(LOCALIZATION)) {
                        nameKey = Vocabulary.Keys.CurrentDateTime;
                        final DateFormat df = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG, formatLocale);
                        if (timezone != null) {
                            df.setTimeZone(timezone);
                        }
                        value = df.format(now);
                    }
                    break;
                }
                case 9: {
                    if (sections.contains(LOCALIZATION)) {
                        final Charset current = Charset.defaultCharset();
                        if (current != null) {
                            nameKey = Vocabulary.Keys.CharacterEncoding;
                            value = current.displayName(locale);
                            // Appends the aliases in the form "name (alias1, alias2, …)".
                            final Set<String> aliases = current.aliases();
                            if (aliases != null && !aliases.isEmpty()) {
                                final StringBuilder buffer = new StringBuilder((String) value);
                                String separator = " (";
                                for (final String alias : aliases) {
                                    buffer.append(separator).append(alias);
                                    separator = ", ";
                                }
                                value = buffer.append(')');
                            }
                        }
                    }
                    break;
                }
                case 10: {
                    newSection = PLUGINS;
                    // Reflection is used because the storage module is an optional dependency.
                    if (sections.contains(PLUGINS)) try {
                        children = (String[]) Class.forName("org.apache.sis.storage.base.Capability")
                                .getMethod("providers", Locale.class, Vocabulary.class).invoke(null, locale, resources);
                        value = resources.getString(Vocabulary.Keys.EntryCount_1, children.length / 2);
                    } catch (ClassNotFoundException e) {
                        // org.apache.sis.storage module not on the module path.
                        Logging.recoverableException(getLogger(Modules.STORAGE), About.class, "configuration", e);
                    } catch (ReflectiveOperationException e) {
                        value = Exceptions.unwrap(e).toString();
                    }
                    nameKey = Vocabulary.Keys.DataFormats;
                    break;
                }
                case 11: {
                    newSection = LOGGING;
                    if (sections.contains(LOGGING)) {
                        nameKey = Vocabulary.Keys.Implementation;
                        value = "java.util.logging";
                        // If a root handler comes from another package (presumably a logging bridge),
                        // report that package instead.
                        for (final Handler handler : getLogger("").getHandlers()) {
                            final String c = handler.getClass().getPackageName();
                            if (!value.equals(c)) {
                                value = c;
                                break;
                            }
                        }
                    }
                    break;
                }
                case 12: {
                    if (sections.contains(LOGGING)) {
                        nameKey = Vocabulary.Keys.Level;
                        final Level level = getLogger("").getLevel(); // Root logger level.
                        if (level == null) {
                            // May happen when some code outside Apache SIS define their own loggers.
                            value = resources.getString(Vocabulary.Keys.NotKnown);
                        } else {
                            value = level.getLocalizedName();
                            // Lists per-logger levels only if they differ from the root level.
                            final Map<String,Level> levels = Loggers.getEffectiveLevels();
                            if (levels.size() != 1 || !level.equals(levels.get(Loggers.ROOT))) {
                                int j = 0;
                                children = new String[levels.size() * 2];
                                for (final Map.Entry<String,Level> entry : levels.entrySet()) {
                                    children[j++] = entry.getKey();
                                    children[j++] = entry.getValue().getLocalizedName();
                                }
                            }
                        }
                    }
                    break;
                }
                case 13: {
                    newSection = PATHS;
                    if (sections.contains(PATHS)) {
                        nameKey = Vocabulary.Keys.UserHome;
                        value = userHome = getProperty("user.home");
                    }
                    break;
                }
                case 14: {
                    if (sections.contains(PATHS)) {
                        nameKey = Vocabulary.Keys.CurrentDirectory;
                        value = getProperty("user.dir");
                    }
                    break;
                }
                case 15: {
                    if (sections.contains(PATHS)) {
                        nameKey = Vocabulary.Keys.DataDirectory;
                        try {
                            value = DataDirectory.getenv();
                        } catch (SecurityException e) {
                            value = e.toString();
                        }
                        if (value == null) {
                            value = Messages.forLocale(locale).getString(Messages.Keys.DataDirectoryNotSpecified_1, DataDirectory.ENV);
                        } else {
                            final Path path = DataDirectory.getRootDirectory();
                            if (path != null) {
                                value = path.toString();
                            } else {
                                // Environment variable is set but does not resolve to a usable directory.
                                value = value + " (" + resources.getString(Vocabulary.Keys.Invalid) + ')';
                            }
                        }
                    }
                    break;
                }
                case 16: {
                    if (sections.contains(PATHS)) {
                        nameKey = Vocabulary.Keys.DataBase;
                        value = MetadataServices.getInstance().getInformation("DataSource", locale);
                    }
                    break;
                }
                case 17: {
                    if (sections.contains(PATHS)) {
                        nameKey = Vocabulary.Keys.JavaHome;
                        value = javaHome = getProperty("java.home");
                    }
                    break;
                }
                case 18: {
                    if (sections.contains(PATHS)) {
                        nameKey = Vocabulary.Keys.TemporaryFiles;
                        value = getProperty("java.io.tmpdir");
                    }
                    break;
                }
                case 19: {
                    newSection = LIBRARIES;
                    if (sections.contains(LIBRARIES)) {
                        nameKey = Vocabulary.Keys.ModulePath;
                        value = modulePath(getProperty("jdk.module.path"), false);
                    }
                    break;
                }
                case 20: {
                    if (sections.contains(LIBRARIES)) {
                        nameKey = Vocabulary.Keys.Classpath;
                        value = modulePath(getProperty("java.class.path"), true);
                    }
                    break;
                }
                default: break fill;
            }
            /*
             * At this point, we have the information about one node to create.
             * If the `newSection` variable is non-null, then this new node shall
             * appear in a new section.
             */
            if (value == null) {
                continue;
            }
            if (newSection != null) {
                section = root.newChild();
                section.setValue(NAME, resources.getString(newSection.resourceKey));
                newSection = null;
            }
            if (name == null) {
                name = resources.getString(nameKey);
            }
            @SuppressWarnings("null") // `section` is non-null because of initially non-null `newSection`.
            final TreeTable.Node node = section.newChild();
            node.setValue(NAME, name);
            if (children != null) {
                // `children` is a flat array of (name, value) pairs; null values are omitted.
                for (int j=0; j<children.length; j+=2) {
                    final String c = children[j+1];
                    if (c != null) {
                        final TreeTable.Node child = node.newChild();
                        child.setValue(NAME, children[j]);
                        child.setValue(VALUE_AS_TEXT, c);
                    }
                }
            }
            if (!(value instanceof Map<?,?>)) {
                node.setValue(VALUE_AS_TEXT, value.toString());
                continue;
            }
            /*
             * Special case for values of kind Map<File,String>.
             * They are extension paths or application class paths.
             * Paths are grouped under the Java home directory, then the user home
             * directory, then everything else; processed entries are removed from
             * the map so that later groups see only the remaining files.
             */
            final Map<?,?> paths = (Map<?,?>) value;
pathTree: for (int j=0; ; j++) {
                TreeTable.Node directory = null;
                final String home;
                final short homeKey;
                switch (j) {
                    case 0: home = javaHome; homeKey = Vocabulary.Keys.JavaHome; break;
                    case 1: home = userHome; homeKey = Vocabulary.Keys.UserHome; break;
                    case 2: home = ""; homeKey = 0; directory = node; break;
                    default: break pathTree;
                }
                if (home == null) {
                    // Should never happen since "user.home" and "java.home" are
                    // standard properties of the Java platform, but let us be safe.
                    continue;
                }
                final File homeDirectory = home.isEmpty() ? null : new File(home);
                for (final Iterator<? extends Map.Entry<?,?>> it=paths.entrySet().iterator(); it.hasNext();) {
                    final Map.Entry<?,?> entry = it.next();
                    File file = (File) entry.getKey();
                    if (homeDirectory != null) {
                        file = relativize(homeDirectory, file);
                        if (file == null) continue;
                    }
                    if (directory == null) {
                        directory = node.newChild();
                        directory.setValue(NAME, parenthesis(resources.getString(homeKey)));
                    }
                    CharSequence title = (CharSequence) entry.getValue();
                    if (title == null || title.length() == 0) {
                        title = parenthesis(resources.getString(file.isDirectory() ?
                                Vocabulary.Keys.Directory : Vocabulary.Keys.Untitled).toLowerCase(locale));
                    }
                    TreeTables.nodeForPath(directory, NAME, file).setValue(VALUE_AS_TEXT, title);
                    it.remove();
                }
                if (directory != null) {
                    concatenateSingletons(directory, true);
                    omitMavenRedundancy(directory);
                }
            }
        }
        TreeTables.replaceCharSequences(table, locale);
        return table;
    }
    /**
     * Returns a map of all JAR files or class directories found in the given paths,
     * associated to a description obtained from their {@code META-INF/MANIFEST.MF}.
     *
     * @param paths the paths using the {@link File#pathSeparatorChar} separator.
     * @param classpath whether to scan the class-path manifest attribute.
     * @return the paths, or {@code null} if none.
     */
    private static Map<File,CharSequence> modulePath(final String paths, final boolean classpath) {
        final Map<File,CharSequence> files = new LinkedHashMap<>();
        return modulePath(paths, File.pathSeparatorChar, classpath, null, files) ? files : null;
    }
    /**
     * Implementation of {@link #modulePath(String, boolean)} to be invoked recursively.
     * The {@code paths} argument may contain many paths separated by the given separator.
     * That separator is usually {@link File#pathSeparatorChar}, except for the value of
     * the {@code MANIFEST.MF} attribute in which case the separator is a space.
     *
     * @param paths the paths using the specified path separator.
     * @param separator the path separator: {@link File#pathSeparatorChar} or space.
     * @param classpath whether to scan the class-path manifest attribute.
     * @param directory the directory of {@code MANIFEST.MF} classpath, or {@code null}.
     * @param files where to add the paths.
     * @return {@code true} if the given map has been changed as a result of this method call.
     */
    private static boolean modulePath(final String paths, final char separator, final boolean classpath,
            final File directory, final Map<File,CharSequence> files)
    {
        if (paths == null) {
            return false;
        }
        boolean changed = false;
        for (final CharSequence path : CharSequences.split(paths, separator)) {
            final File file = new File(directory, path.toString());
            if (file.isFile()) {
                if (!files.containsKey(file)) {
                    files.put(file, null);
                    changed = true;
                }
            } else if (file.isDirectory()) {
                // A directory entry contributes every *.jar file that it contains, in sorted order.
                final File[] list = file.listFiles((pathname) -> pathname.getName().endsWith(".jar"));
                if (list != null) {
                    Arrays.sort(list);
                    for (final File ext : list) {
                        if (!files.containsKey(ext)) {
                            files.put(ext, null);
                            changed = true;
                        }
                    }
                }
            }
        }
        if (!changed) {
            return false;
        }
        /*
         * At this point, we have collected all JAR files. Now set the description from the
         * MANIFEST.MF file and scan recursively for the classpath declared in the manifest.
         */
        IOException error = null;
        for (final Map.Entry<File,CharSequence> entry : files.entrySet()) {
            if (entry.getValue() != null) {
                continue; // This file has already been processed by a recursive method invocation.
            }
            final File file = entry.getKey();
            if (file.isFile() && file.canRead()) {
                try (JarFile jar = new JarFile(file)) {
                    final Manifest manifest = jar.getManifest();
                    if (manifest != null) {
                        final Attributes attributes = manifest.getMainAttributes();
                        if (attributes != null) {
                            CharSequence title;
                            title = concatenate(attributes.getValue(Attributes.Name.IMPLEMENTATION_TITLE),
                                    attributes.getValue(Attributes.Name.IMPLEMENTATION_VERSION), false);
                            if (title == null) {
                                title = concatenate(attributes.getValue(Attributes.Name.SPECIFICATION_TITLE),
                                        attributes.getValue(Attributes.Name.SPECIFICATION_VERSION), false);
                                if (title == null) {
                                    // We really need a non-null value in order to protect this code
                                    // against infinite recursion.
                                    title = "";
                                }
                            }
                            entry.setValue(title);
                            /*
                             * If scanning a class path, this JAR file implicitly adds the content of the
                             * CLASS-PATH attribute as transitive dependencies. If scanning a module path,
                             * this is ignored.
                             */
                            if (classpath) {
                                String cp = attributes.getValue(Attributes.Name.CLASS_PATH);
                                if (modulePath(cp, ' ', true, file.getParentFile(), files)) {
                                    break; // Necessary for avoiding ConcurrentModificationException.
                                }
                            }
                        }
                    }
                } catch (IOException e) {
                    // Collect all I/O failures and log them together after the loop.
                    if (error == null) {
                        error = e;
                    } else {
                        error.addSuppressed(e);
                    }
                }
            }
        }
        if (error != null) {
            Logging.unexpectedException(getLogger(Modules.UTILITIES), About.class, "configuration", error);
        }
        return true;
    }
    /**
     * If a file path in the given node or any children follow the Maven pattern, remove the
     * artifact name and version numbers redundancies in order to make the name more compact.
     * For example, this method replaces {@code "org/opengis/geoapi/3.0.0/geoapi-3.0.0.jar"}
     * by {@code "org/opengis/(…)/geoapi-3.0.0.jar"}.
     */
    private static void omitMavenRedundancy(final TreeTable.Node node) {
        for (final TreeTable.Node child : node.getChildren()) {
            omitMavenRedundancy(child);
        }
        // s0..s2 bracket the last three path components: artifact / version / filename.
        final CharSequence name = node.getValue(NAME);
        final int length = name.length();
        final int s2 = CharSequences.lastIndexOf(name, File.separatorChar, 0, length);
        if (s2 >= 0) {
            final int s1 = CharSequences.lastIndexOf(name, File.separatorChar, 0, s2);
            if (s1 >= 0) {
                final int s0 = CharSequences.lastIndexOf(name, File.separatorChar, 0, s1) + 1;
                // Collapses only if the filename starts with "artifact-version", i.e. the Maven layout.
                final StringBuilder buffer = new StringBuilder(s2 - s0).append(name, s0, s2);
                buffer.setCharAt(s1 - s0, '-');
                if (CharSequences.regionMatches(name, s2+1, buffer)) {
                    buffer.setLength(0);
                    node.setValue(NAME, buffer.append(name, 0, s0).append("(…)").append(name, s2, length));
                }
            }
        }
    }
    /**
     * For every branch containing only one child and no value, merges in-place that branch and the
     * node together. This method is used for simplifying depth trees into something less verbose.
     * However for any column other than {@code NAME}, this method preserves the values of the child
     * node but loses all values of the parent node. For this reason, we perform the merge only if the
     * parent has no value.
     *
     * <p>See the <q>Reduce the depth of a tree</q> example in {@link TreeTables} for more information.
     * In particular, note that this implementation assumes that children collections are {@link List} (this is
     * guaranteed for {@link DefaultTreeTable.Node} implementations).</p>
     *
     * @param node the root of the node to simplify.
     * @param skip {@code true} for disabling concatenation of root node.
     * @return the root of the simplified tree. May be the given {@code node} or a child.
     */
    private static TreeTable.Node concatenateSingletons(final TreeTable.Node node, final boolean skip) {
        // DefaultTreeTable.Node instances are known to handle their children in a List.
        final List<TreeTable.Node> children = (List<TreeTable.Node>) node.getChildren();
        final int size = children.size();
        for (int i=0; i<size; i++) {
            children.set(i, concatenateSingletons(children.get(i), false));
        }
        if (!skip && size == 1) {
            if (node.getValue(VALUE_AS_TEXT) == null) {
                final TreeTable.Node child = children.remove(0);
                final StringBuilder name = new StringBuilder(node.getValue(NAME));
                if (!File.separator.contentEquals(name)) {
                    name.append(File.separatorChar);
                }
                child.setValue(NAME, name.append(child.getValue(NAME)));
                return child;
            }
        }
        return node;
    }
    /**
     * Concatenates the given strings in the format "main (complement)".
     * Any of the given strings can be null.
     *
     * @param main the main string to show first, or {@code null}.
     * @param complement the string to show after the main one, or {@code null}.
     * @param parenthesis {@code true} for writing the complement between parentheses,
     *        or {@code false} for separating it from the main string by a space only.
     * @return the concatenated string, or {@code null} if all components are null.
     */
    private static CharSequence concatenate(final CharSequence main, final CharSequence complement, final boolean parenthesis) {
        if (main != null && main.length() != 0) {
            if (complement != null && complement.length() != 0) {
                // Reuses `main` as the buffer when possible, in order to avoid a copy.
                final StringBuilder buffer = (main instanceof StringBuilder)
                        ? (StringBuilder) main : new StringBuilder(main);
                buffer.append(' ');
                if (parenthesis) buffer.append('(');
                buffer.append(complement);
                if (parenthesis) buffer.append(')');
                return buffer;
            }
            return main;
        }
        return complement;
    }
    /**
     * Returns the given text between parenthesis.
     */
    private static CharSequence parenthesis(final String text) {
        return new StringBuilder(text.length() + 2).append('(').append(text).append(')');
    }
    /**
     * Returns the ISO language or country code for the given locale.
     * Whether we use 2-letters or 3-letters code shall be consistent
     * with {@link org.apache.sis.xml.ValueConverter}.
     *
     * @param locale the locale for which to get a code.
     * @param country {@code true} for the country code, {@code false} for the language code.
     * @return the requested code, or {@code null} if the locale has no 3-letter language code.
     */
    private static String getCode(final Locale locale, final boolean country) {
        try {
            return country ? locale.getCountry() : locale.getISO3Language();
        } catch (MissingResourceException e) {
            // Not all locales have a 3-letter language code; this is an optional information.
            Logging.ignorableException(Vocabulary.LOGGER, About.class, "configuration", e);
            return null;
        }
    }
    /**
     * Formats the given value preceded by a plus or minus sign.
     * This method is used for formatting timezone offset.
     *
     * @param df the {@link DateFormat} to use for formatting the offset.
     * @param offset the offset to format, as a positive or negative value.
     * @param buffer the buffer where to format the offset.
     * @return the given buffer, returned for convenience.
     */
    private static StringBuffer format(final Format df, final int offset, final StringBuffer buffer) {
        return df.format(Math.abs(offset), buffer.append(offset < 0 ? '-' : '+').append(' '), new FieldPosition(0));
    }
    /**
     * Returns the given file relative to the given root, or {@code null} if the root is not
     * a parent of that file.
     *
     * @param root the root directory (typically Java home or user home directory).
     * @param file the file to make relative to the root.
     * @return the file relative to the given root, or {@code null} if none.
     */
    private static File relativize(final File root, final File file) {
        File parent = file.getParentFile();
        if (parent == null) {
            return null;
        }
        if (root.equals(parent)) {
            parent = null;
        } else {
            // Walks up recursively until `root` is found; null propagates if it never is.
            parent = relativize(root, parent);
            if (parent == null) {
                return null;
            }
        }
        return new File(parent, file.getName());
    }
}
|
googleapis/google-cloud-java | 35,861 | java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/ListDescendantEventThreatDetectionCustomModulesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v1;
/**
*
*
* <pre>
* Request to list current and descendant resident Event Threat Detection custom
* modules.
* </pre>
*
* Protobuf type {@code
* google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest}
*/
public final class ListDescendantEventThreatDetectionCustomModulesRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest)
ListDescendantEventThreatDetectionCustomModulesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListDescendantEventThreatDetectionCustomModulesRequest.newBuilder() to construct.
  private ListDescendantEventThreatDetectionCustomModulesRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes the string fields to the proto3 default value (empty string).
  private ListDescendantEventThreatDetectionCustomModulesRequest() {
    parent_ = "";
    pageToken_ = "";
  }
  // Used by the protobuf runtime to create new instances without going through a builder.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDescendantEventThreatDetectionCustomModulesRequest();
  }
  // Returns the protobuf descriptor for this message type, as registered in the
  // generated SecuritycenterService file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_ListDescendantEventThreatDetectionCustomModulesRequest_descriptor;
  }
  // Maps descriptor fields to the generated accessors, for reflective access by the runtime.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_ListDescendantEventThreatDetectionCustomModulesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v1
                .ListDescendantEventThreatDetectionCustomModulesRequest.class,
            com.google.cloud.securitycenter.v1
                .ListDescendantEventThreatDetectionCustomModulesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; the accessors below convert
  // lazily and cache the converted form back into this field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Name of the parent to list custom modules under.
   *
   * Its format is:
   *
   * * `organizations/{organization}/eventThreatDetectionSettings`.
   * * `folders/{folder}/eventThreatDetectionSettings`.
   * * `projects/{project}/eventThreatDetectionSettings`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and keep the String for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Name of the parent to list custom modules under.
   *
   * Its format is:
   *
   * * `organizations/{organization}/eventThreatDetectionSettings`.
   * * `folders/{folder}/eventThreatDetectionSettings`.
   * * `projects/{project}/eventThreatDetectionSettings`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and keep the ByteString for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a java.lang.String or a ByteString; converted lazily by the accessors below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * A page token, received from a previous
   * `ListDescendantEventThreatDetectionCustomModules` call. Provide this to
   * retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to
   * `ListDescendantEventThreatDetectionCustomModules` must match the call that
   * provided the page token.
   * </pre>
   *
   * <code>string page_token = 2;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and keep the String for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous
   * `ListDescendantEventThreatDetectionCustomModules` call. Provide this to
   * retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to
   * `ListDescendantEventThreatDetectionCustomModules` must match the call that
   * provided the page token.
   * </pre>
   *
   * <code>string page_token = 2;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and keep the ByteString for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * The maximum number of modules to return. The service may return fewer than
   * this value.
   * If unspecified, at most 10 configs will be returned.
   * The maximum value is 1000; values above 1000 will be coerced to 1000.
   * </pre>
   *
   * <code>int32 page_size = 3;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // Proto3 message with no required fields: always initialized once computed.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message. Per proto3 semantics, fields equal to their default
  // value (empty string, 0) are not written to the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in `memoizedSize`) the serialized byte size, mirroring
  // the default-value skipping done in writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields, as generated by protoc.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.securitycenter.v1
            .ListDescendantEventThreatDetectionCustomModulesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
        other =
            (com.google.cloud.securitycenter.v1
                    .ListDescendantEventThreatDetectionCustomModulesRequest)
                obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor, each field (tagged by field number) and unknown fields;
  // memoized in `memoizedHashCode`. Consistent with equals above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points for the supported input kinds
  // (ByteBuffer, ByteString, byte[], InputStream, delimited stream),
  // with and without an extension registry. All delegate to PARSER.
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
public static com.google.cloud.securitycenter.v1
.ListDescendantEventThreatDetectionCustomModulesRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1
.ListDescendantEventThreatDetectionCustomModulesRequest
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v1
.ListDescendantEventThreatDetectionCustomModulesRequest
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  // Builder factory methods: newBuilder() starts from the default instance,
  // newBuilder(prototype) pre-populates from an existing message, and
  // toBuilder() avoids a mergeFrom pass when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request to list current and descendant resident Event Threat Detection custom
   * modules.
   * </pre>
   *
   * Protobuf type {@code
   * google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest}
   */
  // Mutable builder for the request message. bitField0_ tracks which of the
  // three fields (bit 0: parent, bit 1: page_token, bit 2: page_size) have been
  // explicitly set since the last clear()/build().
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest)
      com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycenter.v1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1_ListDescendantEventThreatDetectionCustomModulesRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycenter.v1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1_ListDescendantEventThreatDetectionCustomModulesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycenter.v1
                  .ListDescendantEventThreatDetectionCustomModulesRequest.class,
              com.google.cloud.securitycenter.v1
                  .ListDescendantEventThreatDetectionCustomModulesRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageToken_ = "";
      pageSize_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycenter.v1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1_ListDescendantEventThreatDetectionCustomModulesRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
        build() {
      com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
          result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
        buildPartial() {
      com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
          result =
              new com.google.cloud.securitycenter.v1
                  .ListDescendantEventThreatDetectionCustomModulesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose bits are set in bitField0_ into the result.
    private void buildPartial0(
        com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof
          com.google.cloud.securitycenter.v1
              .ListDescendantEventThreatDetectionCustomModulesRequest) {
        return mergeFrom(
            (com.google.cloud.securitycenter.v1
                    .ListDescendantEventThreatDetectionCustomModulesRequest)
                other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: only non-default values from `other` overwrite
    // this builder's state.
    public Builder mergeFrom(
        com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
            other) {
      if (other
          == com.google.cloud.securitycenter.v1
              .ListDescendantEventThreatDetectionCustomModulesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = parent (field 1, length-delimited),
    // tag 18 = page_token (field 2), tag 24 = page_size (field 3, varint);
    // anything else is preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. Name of the parent to list custom modules under.
     *
     * Its format is:
     *
     * * `organizations/{organization}/eventThreatDetectionSettings`.
     * * `folders/{folder}/eventThreatDetectionSettings`.
     * * `projects/{project}/eventThreatDetectionSettings`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Field is stored as ByteString until first String access; cache the
        // decoded String so subsequent calls are cheap.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the parent to list custom modules under.
     *
     * Its format is:
     *
     * * `organizations/{organization}/eventThreatDetectionSettings`.
     * * `folders/{folder}/eventThreatDetectionSettings`.
     * * `projects/{project}/eventThreatDetectionSettings`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the parent to list custom modules under.
     *
     * Its format is:
     *
     * * `organizations/{organization}/eventThreatDetectionSettings`.
     * * `folders/{folder}/eventThreatDetectionSettings`.
     * * `projects/{project}/eventThreatDetectionSettings`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the parent to list custom modules under.
     *
     * Its format is:
     *
     * * `organizations/{organization}/eventThreatDetectionSettings`.
     * * `folders/{folder}/eventThreatDetectionSettings`.
     * * `projects/{project}/eventThreatDetectionSettings`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the parent to list custom modules under.
     *
     * Its format is:
     *
     * * `organizations/{organization}/eventThreatDetectionSettings`.
     * * `folders/{folder}/eventThreatDetectionSettings`.
     * * `projects/{project}/eventThreatDetectionSettings`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * A page token, received from a previous
     * `ListDescendantEventThreatDetectionCustomModules` call. Provide this to
     * retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to
     * `ListDescendantEventThreatDetectionCustomModules` must match the call that
     * provided the page token.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Same lazy ByteString-to-String caching as getParent().
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous
     * `ListDescendantEventThreatDetectionCustomModules` call. Provide this to
     * retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to
     * `ListDescendantEventThreatDetectionCustomModules` must match the call that
     * provided the page token.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous
     * `ListDescendantEventThreatDetectionCustomModules` call. Provide this to
     * retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to
     * `ListDescendantEventThreatDetectionCustomModules` must match the call that
     * provided the page token.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous
     * `ListDescendantEventThreatDetectionCustomModules` call. Provide this to
     * retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to
     * `ListDescendantEventThreatDetectionCustomModules` must match the call that
     * provided the page token.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous
     * `ListDescendantEventThreatDetectionCustomModules` call. Provide this to
     * retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to
     * `ListDescendantEventThreatDetectionCustomModules` must match the call that
     * provided the page token.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of modules to return. The service may return fewer than
     * this value.
     * If unspecified, at most 10 configs will be returned.
     * The maximum value is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of modules to return. The service may return fewer than
     * this value.
     * If unspecified, at most 10 configs will be returned.
     * The maximum value is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of modules to return. The service may return fewer than
     * this value.
     * If unspecified, at most 10 configs will be returned.
     * The maximum value is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest)
  // Singleton default instance; all fields hold their proto3 defaults.
  private static final com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.securitycenter.v1
            .ListDescendantEventThreatDetectionCustomModulesRequest();
  }
  public static com.google.cloud.securitycenter.v1
          .ListDescendantEventThreatDetectionCustomModulesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser; on failure it attaches the partially-built message to the
  // thrown InvalidProtocolBufferException so callers can inspect what parsed.
  private static final com.google.protobuf.Parser<
          ListDescendantEventThreatDetectionCustomModulesRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<
              ListDescendantEventThreatDetectionCustomModulesRequest>() {
            @java.lang.Override
            public ListDescendantEventThreatDetectionCustomModulesRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };
  // Accessors exposing the shared PARSER and DEFAULT_INSTANCE singletons.
  public static com.google.protobuf.Parser<ListDescendantEventThreatDetectionCustomModulesRequest>
      parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDescendantEventThreatDetectionCustomModulesRequest>
      getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.securitycenter.v1.ListDescendantEventThreatDetectionCustomModulesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.qpid.tests.protocol.v1_0.transaction;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.apache.qpid.server.protocol.v1_0.type.Binary;
import org.apache.qpid.server.protocol.v1_0.type.ErrorCarryingFrameBody;
import org.apache.qpid.server.protocol.v1_0.type.Symbol;
import org.apache.qpid.server.protocol.v1_0.type.UnsignedInteger;
import org.apache.qpid.server.protocol.v1_0.type.messaging.Accepted;
import org.apache.qpid.server.protocol.v1_0.type.transaction.TransactionError;
import org.apache.qpid.server.protocol.v1_0.type.transaction.TransactionalState;
import org.apache.qpid.server.protocol.v1_0.type.transport.Attach;
import org.apache.qpid.server.protocol.v1_0.type.transport.Begin;
import org.apache.qpid.server.protocol.v1_0.type.transport.Disposition;
import org.apache.qpid.server.protocol.v1_0.type.transport.Error;
import org.apache.qpid.server.protocol.v1_0.type.transport.Flow;
import org.apache.qpid.server.protocol.v1_0.type.transport.ReceiverSettleMode;
import org.apache.qpid.server.protocol.v1_0.type.transport.Role;
import org.apache.qpid.server.protocol.v1_0.type.transport.Transfer;
import org.apache.qpid.tests.protocol.Response;
import org.apache.qpid.tests.protocol.SpecificationTest;
import org.apache.qpid.tests.protocol.v1_0.FrameTransport;
import org.apache.qpid.tests.protocol.v1_0.Interaction;
import org.apache.qpid.tests.protocol.v1_0.Utils;
import org.apache.qpid.tests.utils.BrokerAdmin;
import org.apache.qpid.tests.utils.BrokerAdminUsingTestBase;
public class TransactionalTransferTest extends BrokerAdminUsingTestBase
{
    /** Creates the shared test queue before each test. */
    @BeforeEach
    public void setUp()
    {
        getBrokerAdmin().createQueue(BrokerAdmin.TEST_QUEUE_NAME);
    }
    // Posts one message inside a declared transaction (receiver settles first),
    // commits via discharge(fail=false), and verifies the message is delivered.
    @Test
    @SpecificationTest(section = "4.4.4",
            description = "Transactional Posting[...]the transaction controller wishes to associate an outgoing"
                          + " transfer with a transaction, it MUST set the state of the transfer with a"
                          + "transactional-state carrying the appropriate transaction identifier.")
    public void sendTransactionalPostingReceiverSettlesFirst() throws Exception
    {
        try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
        {
            final UnsignedInteger linkHandle = UnsignedInteger.ONE;
            final Interaction interaction = transport.newInteraction();
            // Declare a transaction on the coordinator link, then transfer a
            // message whose delivery state carries that transaction id.
            Disposition responseDisposition = interaction.negotiateOpen()
                                                         .begin()
                                                         .consumeResponse(Begin.class)
                                                         .txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
                                                         .txnDeclare()
                                                         .attachRole(Role.SENDER)
                                                         .attachTargetAddress(BrokerAdmin.TEST_QUEUE_NAME)
                                                         .attachHandle(linkHandle)
                                                         .attach().consumeResponse(Attach.class)
                                                         .consumeResponse(Flow.class)
                                                         .transferDeliveryId()
                                                         .transferHandle(linkHandle)
                                                         .transferPayloadData(getTestName())
                                                         .transferTransactionalStateFromCurrentTransaction()
                                                         .transfer()
                                                         .consume(Disposition.class, Flow.class);
            // Broker must settle with a transactional-state wrapping Accepted.
            assertThat(responseDisposition.getRole(), is(Role.RECEIVER));
            assertThat(responseDisposition.getSettled(), is(Boolean.TRUE));
            assertThat(responseDisposition.getState(), is(instanceOf(TransactionalState.class)));
            assertThat(((TransactionalState) responseDisposition.getState()).getOutcome(), is(instanceOf(Accepted.class)));
            // Commit the transaction.
            interaction.txnDischarge(false);
            assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
        }
        // After commit the message must be available on the queue.
        Object receivedMessage = Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME);
        assertThat(receivedMessage, is(equalTo(getTestName())));
    }
    // Posts one message inside a transaction and then rolls the transaction
    // back via discharge(fail=true); the posted message must NOT appear on the
    // queue afterwards.
    @Test
    @SpecificationTest(section = "4.4.4",
            description = "Transactional Posting[...]the transaction controller wishes to associate an outgoing"
                          + " transfer with a transaction, it MUST set the state of the transfer with a"
                          + "transactional-state carrying the appropriate transaction identifier.")
    public void sendTransactionalPostingDischargeFail() throws Exception
    {
        try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
        {
            final UnsignedInteger linkHandle = UnsignedInteger.ONE;
            final Interaction interaction = transport.newInteraction();
            Disposition responseDisposition = interaction.negotiateOpen()
                                                         .begin()
                                                         .consumeResponse(Begin.class)
                                                         .txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
                                                         .txnDeclare()
                                                         .attachRole(Role.SENDER)
                                                         .attachTargetAddress(BrokerAdmin.TEST_QUEUE_NAME)
                                                         .attachHandle(linkHandle)
                                                         .attach().consumeResponse(Attach.class)
                                                         .consumeResponse(Flow.class)
                                                         .transferDeliveryId()
                                                         .transferHandle(linkHandle)
                                                         .transferPayloadData(getTestName())
                                                         .transferTransactionalStateFromCurrentTransaction()
                                                         .transfer()
                                                         .consume(Disposition.class, Flow.class);
            assertThat(responseDisposition.getRole(), is(Role.RECEIVER));
            assertThat(responseDisposition.getSettled(), is(Boolean.TRUE));
            assertThat(responseDisposition.getState(), is(instanceOf(TransactionalState.class)));
            assertThat(((TransactionalState) responseDisposition.getState()).getOutcome(), is(instanceOf(Accepted.class)));
            // Roll back: the transactional transfer above must be discarded.
            interaction.txnDischarge(true);
            assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
            // Publish a second message and verify it is the FIRST one received,
            // proving the rolled-back message never reached the queue.
            final String content = getTestName() + "_2";
            Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, content);
            assertThat(Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME), is(equalTo(content)));
        }
    }
    // Same transactional posting flow but with rcv-settle-mode SECOND: the
    // broker responds unsettled, the controller settles with a sender
    // disposition, then commits.
    @Test
    @SpecificationTest(section = "4.4.4",
            description = "Transactional Posting[...]the transaction controller wishes to associate an outgoing"
                          + " transfer with a transaction, it MUST set the state of the transfer with a"
                          + "transactional-state carrying the appropriate transaction identifier.")
    public void sendTransactionalPostingReceiverSettlesSecond() throws Exception
    {
        try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
        {
            final UnsignedInteger linkHandle = UnsignedInteger.ONE;
            final Interaction interaction = transport.newInteraction();
            Disposition responseDisposition = interaction.negotiateOpen()
                                                         .begin()
                                                         .consumeResponse(Begin.class)
                                                         .txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
                                                         .txnDeclare()
                                                         .attachRole(Role.SENDER)
                                                         .attachTargetAddress(BrokerAdmin.TEST_QUEUE_NAME)
                                                         .attachRcvSettleMode(ReceiverSettleMode.SECOND)
                                                         .attachHandle(linkHandle)
                                                         .attach()
                                                         .consumeResponse(Attach.class)
                                                         // Skip the test if the broker does not agree to settle second.
                                                         .assertLatestResponse(Attach.class, this::assumeReceiverSettlesSecond)
                                                         .consumeResponse(Flow.class)
                                                         .transferDeliveryId()
                                                         .transferHandle(linkHandle)
                                                         .transferPayloadData(getTestName())
                                                         .transferTransactionalStateFromCurrentTransaction()
                                                         .transfer()
                                                         .consumeResponse(Disposition.class)
                                                         .getLatestResponse(Disposition.class);
            // In settle-second mode the broker's disposition is unsettled.
            assertThat(responseDisposition.getRole(), is(Role.RECEIVER));
            assertThat(responseDisposition.getSettled(), is(Boolean.FALSE));
            assertThat(responseDisposition.getState(), is(instanceOf(TransactionalState.class)));
            assertThat(((TransactionalState) responseDisposition.getState()).getOutcome(), is(instanceOf(Accepted.class)));
            // Controller settles the delivery within the transaction...
            interaction.dispositionRole(Role.SENDER)
                       .dispositionSettled(true)
                       .dispositionTransactionalStateFromCurrentTransaction(new Accepted())
                       .disposition();
            // ...and commits.
            interaction.txnDischarge(false);
            assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
        }
        assertThat(Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME), is(equalTo(getTestName())));
    }
    // Sends a transfer whose transactional-state references a transaction id
    // never declared with the broker; the broker must reply with an error
    // frame carrying transaction-error unknown-id.
    @Test
    @SpecificationTest(section = "4.4.1",
            description = "If the transaction controller wishes to associate an outgoing transfer with a transaction,"
                          + " it MUST set the state of the transfer with a transactional-state carrying the appropriate"
                          + " transaction identifier.")
    public void sendTransactionalPostingTransferFailsDueToUnknownTransactionId() throws Exception
    {
        try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
        {
            final UnsignedInteger linkHandle = UnsignedInteger.ONE;
            final Interaction interaction = transport.newInteraction();
            ErrorCarryingFrameBody response = interaction.negotiateOpen()
                                                         .begin().consumeResponse(Begin.class)
                                                         .txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
                                                         .txnDeclare()
                                                         .attachRole(Role.SENDER)
                                                         .attachTargetAddress(BrokerAdmin.TEST_QUEUE_NAME)
                                                         .attachHandle(linkHandle)
                                                         .attach().consumeResponse(Attach.class)
                                                         .consumeResponse(Flow.class)
                                                         .transferDeliveryId()
                                                         .transferHandle(linkHandle)
                                                         .transferPayloadData(getTestName())
                                                         // integerToBinary is a helper defined elsewhere in this class;
                                                         // MAX_VALUE fabricates a txn-id the broker has never issued.
                                                         .transferTransactionalState(integerToBinary(Integer.MAX_VALUE))
                                                         .transfer()
                                                         .consume(ErrorCarryingFrameBody.class, Flow.class);
            final Error error = response.getError();
            assertThat(error, is(notNullValue()));
            assertThat(error.getCondition(), equalTo(TransactionError.UNKNOWN_ID));
        }
    }
    // Consumes a pre-published message, accepts it with a transactional
    // disposition, and commits the transaction (discharge fail=false).
    @Test
    @SpecificationTest(section = "4.4.2", description = "Transactional Retirement[...] The transaction controller might"
                                                        + "wish to associate the outcome of a delivery with a transaction.")
    public void receiveTransactionalRetirementReceiverSettleFirst() throws Exception
    {
        Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, getTestName());
        try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
        {
            final Interaction interaction = transport.newInteraction();
            // Declare a transaction, attach a receiving link and grant one
            // credit so the broker delivers the queued message.
            interaction.negotiateOpen()
                       .begin()
                       .consumeResponse(Begin.class)
                       .txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
                       .txnDeclare()
                       .attachRole(Role.RECEIVER)
                       .attachHandle(UnsignedInteger.ONE)
                       .attachSourceAddress(BrokerAdmin.TEST_QUEUE_NAME)
                       .attachRcvSettleMode(ReceiverSettleMode.FIRST)
                       .attach()
                       .consumeResponse(Attach.class)
                       .flowIncomingWindow(UnsignedInteger.ONE)
                       .flowNextIncomingIdFromPeerLatestSessionBeginAndDeliveryCount()
                       .flowOutgoingWindow(UnsignedInteger.ZERO)
                       .flowNextOutgoingId(UnsignedInteger.ZERO)
                       .flowLinkCredit(UnsignedInteger.ONE)
                       .flowHandleFromLinkHandle()
                       .flow()
                       .receiveDelivery()
                       .decodeLatestDelivery();
            Object data = interaction.getDecodedLatestDelivery();
            assertThat(data, is(equalTo(getTestName())));
            // Retire the delivery inside the transaction and commit.
            interaction.dispositionSettled(true)
                       .dispositionRole(Role.RECEIVER)
                       .dispositionTransactionalStateFromCurrentTransaction(new Accepted())
                       .disposition().txnDischarge(false);
            assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
        }
    }
@Test
@SpecificationTest(section = "4.4.2", description = "Transactional Retirement[...] The transaction controller might"
+ "wish to associate the outcome of a delivery with a transaction.")
public void receiveTransactionalRetirementDischargeFail() throws Exception
{
Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, getTestName());
try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
{
final Interaction interaction = transport.newInteraction();
interaction.negotiateOpen()
.begin()
.consumeResponse(Begin.class)
.txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
.txnDeclare()
.attachRole(Role.RECEIVER)
.attachHandle(UnsignedInteger.ONE)
.attachSourceAddress(BrokerAdmin.TEST_QUEUE_NAME)
.attachRcvSettleMode(ReceiverSettleMode.FIRST)
.attach()
.consumeResponse(Attach.class)
.flowIncomingWindow(UnsignedInteger.ONE)
.flowNextIncomingIdFromPeerLatestSessionBeginAndDeliveryCount()
.flowOutgoingWindow(UnsignedInteger.ZERO)
.flowNextOutgoingId(UnsignedInteger.ZERO)
.flowLinkCredit(UnsignedInteger.ONE)
.flowHandleFromLinkHandle()
.flow()
.receiveDelivery()
.decodeLatestDelivery();
Object data = interaction.getDecodedLatestDelivery();
assertThat(data, is(equalTo(getTestName())));
interaction.dispositionSettled(true)
.dispositionRole(Role.RECEIVER)
.dispositionTransactionalStateFromCurrentTransaction(new Accepted())
.disposition().txnDischarge(true);
assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
Object receivedMessage = Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME);
assertThat(receivedMessage, is(equalTo(getTestName())));
}
}
    @Test
    @SpecificationTest(section = "4.4.2", description = "Transactional Retirement[...]"
                                                        + " To associate an outcome with a transaction the controller"
                                                        + " sends a disposition performative which sets the state"
                                                        + " of the delivery to a transactional-state with the desired"
                                                        + " transaction identifier and the outcome to be applied"
                                                        + " upon a successful discharge.")
    public void receiveTransactionalRetirementDispositionFailsDueToUnknownTransactionId() throws Exception
    {
        // Seed the queue; the finally-block below verifies the message survives the failed retire.
        Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, getTestName());
        try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
        {
            final Interaction interaction = transport.newInteraction();
            // Declare a real transaction, attach a settle-first receiver and take one delivery.
            List<Transfer> transfers = interaction.negotiateOpen()
                                                  .begin().consumeResponse(Begin.class)
                                                  .txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
                                                  .txnDeclare()
                                                  .attachRole(Role.RECEIVER)
                                                  .attachHandle(UnsignedInteger.ONE)
                                                  .attachSourceAddress(BrokerAdmin.TEST_QUEUE_NAME)
                                                  .attachRcvSettleMode(ReceiverSettleMode.FIRST)
                                                  .attach().consumeResponse(Attach.class)
                                                  .flowIncomingWindow(UnsignedInteger.ONE)
                                                  .flowNextIncomingIdFromPeerLatestSessionBeginAndDeliveryCount()
                                                  .flowOutgoingWindow(UnsignedInteger.ZERO)
                                                  .flowNextOutgoingId(UnsignedInteger.ZERO)
                                                  .flowLinkCredit(UnsignedInteger.ONE)
                                                  .flowHandleFromLinkHandle()
                                                  .flow()
                                                  .receiveDelivery()
                                                  .getLatestDelivery();
            UnsignedInteger deliveryId = transfers.get(0).getDeliveryId();
            assertThat(deliveryId, is(notNullValue()));
            Object data = interaction.decodeLatestDelivery().getDecodedLatestDelivery();
            assertThat(data, is(equalTo(getTestName())));
            // Send a disposition carrying a txn-id that was never declared; the broker must
            // respond with an error frame carrying transaction-error "unknown-id".
            ErrorCarryingFrameBody response = interaction.dispositionSettled(true)
                                                         .dispositionRole(Role.RECEIVER)
                                                         .dispositionTransactionalState(integerToBinary(Integer.MAX_VALUE),
                                                                                        new Accepted())
                                                         .dispositionFirst(deliveryId)
                                                         .disposition()
                                                         .consume(ErrorCarryingFrameBody.class, Flow.class);
            final Error error = response.getError();
            assertThat(error, is(notNullValue()));
            assertThat(error.getCondition(), equalTo(TransactionError.UNKNOWN_ID));
        }
        finally
        {
            // The failed retirement must not consume the message.
            assertThat(Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME), is(equalTo(getTestName())));
        }
    }
@Disabled("TODO disposition is currently not being sent by Broker")
@Test
@SpecificationTest(section = "4.4.2", description = "Transactional Retirement[...] The transaction controller might"
+ "wish to associate the outcome of a delivery with a transaction.")
public void receiveTransactionalRetirementReceiverSettleSecond() throws Exception
{
Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, getTestName());
try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
{
final Interaction interaction = transport.newInteraction();
interaction.negotiateOpen()
.begin()
.consumeResponse(Begin.class)
.txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
.txnDeclare()
.attachRole(Role.RECEIVER)
.attachHandle(UnsignedInteger.ONE)
.attachSourceAddress(BrokerAdmin.TEST_QUEUE_NAME)
.attachRcvSettleMode(ReceiverSettleMode.SECOND)
.assertLatestResponse(Attach.class, this::assumeReceiverSettlesSecond)
.attach()
.consumeResponse(Attach.class)
.flowIncomingWindow(UnsignedInteger.ONE)
.flowNextIncomingId(UnsignedInteger.ZERO)
.flowOutgoingWindow(UnsignedInteger.ZERO)
.flowNextOutgoingId(UnsignedInteger.ZERO)
.flowLinkCredit(UnsignedInteger.ONE)
.flowHandleFromLinkHandle()
.flow()
.receiveDelivery()
.decodeLatestDelivery();
Object data = interaction.getDecodedLatestDelivery();
assertThat(data, is(equalTo(getTestName())));
Disposition settledDisposition = interaction.dispositionSettled(false)
.dispositionRole(Role.RECEIVER)
.dispositionTransactionalStateFromCurrentTransaction(new Accepted())
.disposition()
.consumeResponse(Disposition.class)
.getLatestResponse(Disposition.class);
assertThat(settledDisposition.getSettled(), is(true));
assertThat(settledDisposition.getState(), is(instanceOf(TransactionalState.class)));
assertThat(((TransactionalState) settledDisposition.getState()).getOutcome(), is(instanceOf(Accepted.class)));
interaction.txnDischarge(false);
assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
}
}
@Test
@SpecificationTest(section = "4.4.2", description = "Transactional Acquisition[...]In the case of the flow frame,"
+ " the transactional work is not necessarily directly"
+ " initiated or entirely determined when the flow frame"
+ " arrives at the resource, but can in fact occur at some "
+ " later point and in ways not necessarily"
+ " anticipated by the controller.")
public void receiveTransactionalAcquisitionReceiverSettleFirst() throws Exception
{
Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, getTestName());
try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
{
final Interaction interaction = transport.newInteraction();
interaction.negotiateOpen()
.begin()
.consumeResponse(Begin.class)
.txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
.txnDeclare()
.attachRole(Role.RECEIVER)
.attachHandle(UnsignedInteger.ONE)
.attachSourceAddress(BrokerAdmin.TEST_QUEUE_NAME)
.attachRcvSettleMode(ReceiverSettleMode.FIRST)
.attach()
.consumeResponse(Attach.class)
.flowIncomingWindow(UnsignedInteger.ONE)
.flowNextIncomingIdFromPeerLatestSessionBeginAndDeliveryCount()
.flowOutgoingWindow(UnsignedInteger.ZERO)
.flowNextOutgoingId(UnsignedInteger.ZERO)
.flowLinkCredit(UnsignedInteger.ONE)
.flowHandleFromLinkHandle()
.flowProperties(Map.of(Symbol.valueOf("txn-id"), interaction.getCurrentTransactionId()))
.flow()
.receiveDelivery();
List<Transfer> transfers = interaction.getLatestDelivery();
assertThat(transfers.size(), is(equalTo(1)));
Object data = interaction.decodeLatestDelivery().getDecodedLatestDelivery();
assertThat(data, is(equalTo(getTestName())));
interaction.dispositionSettled(true)
.dispositionRole(Role.RECEIVER)
.dispositionTransactionalStateFromCurrentTransaction(new Accepted())
.dispositionFirstFromLatestDelivery()
.disposition().txnDischarge(false);
assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
Transfer transfer = transfers.get(0);
assumeTrue(is(instanceOf(TransactionalState.class)).matches(transfer.getState()));
assumeTrue(is(equalTo(interaction.getCurrentTransactionId())).matches(((TransactionalState) transfer.getState()).getTxnId()));
final String content = getTestName() + "_2";
Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, content);
assertThat(Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME), is(equalTo(content)));
}
}
@Test
@SpecificationTest(section = "4.4.3", description = "Transactional Acquisition[...]In the case of the flow frame,"
+ " the transactional work is not necessarily directly"
+ " initiated or entirely determined when the flow frame"
+ " arrives at the resource, but can in fact occur at some "
+ " later point and in ways not necessarily"
+ " anticipated by the controller.")
public void receiveTransactionalAcquisitionDischargeFail() throws Exception
{
Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, getTestName());
try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
{
final Interaction interaction = transport.newInteraction();
interaction.negotiateOpen()
.begin()
.consumeResponse(Begin.class)
.txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
.txnDeclare()
.attachRole(Role.RECEIVER)
.attachHandle(UnsignedInteger.ONE)
.attachSourceAddress(BrokerAdmin.TEST_QUEUE_NAME)
.attachRcvSettleMode(ReceiverSettleMode.FIRST)
.attach()
.consumeResponse(Attach.class)
.flowIncomingWindow(UnsignedInteger.ONE)
.flowNextIncomingIdFromPeerLatestSessionBeginAndDeliveryCount()
.flowOutgoingWindow(UnsignedInteger.ZERO)
.flowNextOutgoingId(UnsignedInteger.ZERO)
.flowLinkCredit(UnsignedInteger.ONE)
.flowHandleFromLinkHandle()
.flowProperties(Map.of(Symbol.valueOf("txn-id"), interaction.getCurrentTransactionId()))
.flow()
.receiveDelivery();
List<Transfer> transfers = interaction.getLatestDelivery();
assertThat(transfers.size(), is(equalTo(1)));
Object data = interaction.decodeLatestDelivery().getDecodedLatestDelivery();
assertThat(data, is(equalTo(getTestName())));
interaction.dispositionSettled(true)
.dispositionRole(Role.RECEIVER)
.dispositionTransactionalState(interaction.getCurrentTransactionId(), new Accepted())
.disposition().txnDischarge(true);
assertThat(interaction.getCoordinatorLatestDeliveryState(), is(instanceOf(Accepted.class)));
assertThat(Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME), is(equalTo(getTestName())));
Transfer transfer = transfers.get(0);
assumeTrue(is(instanceOf(TransactionalState.class)).matches(transfer.getState()));
assumeTrue(is(equalTo(interaction.getCurrentTransactionId())).matches(((TransactionalState) transfer.getState()).getTxnId()));
}
}
    @Test
    @Disabled("QPID-7951")
    @SpecificationTest(section = "4.4.3", description = "Transactional Acquisition[...]"
                                                        + " the resource associates an additional piece of state with"
                                                        + " outgoing link endpoints, a txn-id that identifies"
                                                        + " the transaction with which acquired messages"
                                                        + " will be associated. This state is determined by"
                                                        + " the controller by specifying a txn-id entry in the"
                                                        + " properties map of the flow frame.")
    public void receiveTransactionalAcquisitionFlowFailsDueToUnknownTransactionId() throws Exception
    {
        // Seed the queue; the finally-block verifies the failed acquisition left the message behind.
        Utils.putMessageOnQueue(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME, getTestName());
        try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect())
        {
            final Interaction interaction = transport.newInteraction();
            // Issue a flow whose txn-id property references a transaction that was never declared;
            // the broker is expected to reply with transaction-error "unknown-id".
            ErrorCarryingFrameBody response = interaction.negotiateOpen()
                                                         .begin()
                                                         .consumeResponse(Begin.class)
                                                         .txnAttachCoordinatorLink(UnsignedInteger.ZERO, this::coordinatorAttachExpected)
                                                         .txnDeclare()
                                                         .attachRole(Role.RECEIVER)
                                                         .attachHandle(UnsignedInteger.ONE)
                                                         .attachSourceAddress(BrokerAdmin.TEST_QUEUE_NAME)
                                                         .attachRcvSettleMode(ReceiverSettleMode.FIRST)
                                                         .attach()
                                                         .consumeResponse(Attach.class)
                                                         .flowIncomingWindow(UnsignedInteger.ONE)
                                                         .flowLinkCredit(UnsignedInteger.ONE)
                                                         .flowHandleFromLinkHandle()
                                                         .flowProperties(Map.of(Symbol.valueOf("txn-id"),
                                                                                integerToBinary(Integer.MAX_VALUE)))
                                                         .flow()
                                                         .consume(ErrorCarryingFrameBody.class, Flow.class);
            final Error error = response.getError();
            assertThat(error, is(notNullValue()));
            assertThat(error.getCondition(), equalTo(TransactionError.UNKNOWN_ID));
        }
        finally
        {
            // The failed acquisition must not consume the message.
            assertThat(Utils.receiveMessage(getBrokerAdmin(), BrokerAdmin.TEST_QUEUE_NAME), is(equalTo(getTestName())));
        }
    }
Binary integerToBinary(final int txnId)
{
byte[] data = new byte[4];
data[3] = (byte) (txnId & 0xff);
data[2] = (byte) ((txnId & 0xff00) >> 8);
data[1] = (byte) ((txnId & 0xff0000) >> 16);
data[0] = (byte) ((txnId & 0xff000000) >> 24);
return new Binary(data);
}
private void assumeReceiverSettlesSecond(final Attach attach)
{
assumeTrue(is(equalTo(ReceiverSettleMode.SECOND)).matches(attach.getRcvSettleMode()));
}
private void coordinatorAttachExpected(final Response<?> response)
{
assertThat(response, is(notNullValue()));
assumeTrue(anyOf(instanceOf(Attach.class), instanceOf(Flow.class)).matches(response.getBody()));
}
}
|
googleapis/google-cloud-java | 35,970 | java-document-ai/proto-google-cloud-document-ai-v1beta3/src/main/java/com/google/cloud/documentai/v1beta3/EvaluateProcessorVersionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/documentai/v1beta3/document_processor_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.documentai.v1beta3;
/**
*
*
* <pre>
* Evaluates the given
* [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] against
* the supplied documents.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest}
*/
public final class EvaluateProcessorVersionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest)
EvaluateProcessorVersionRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use EvaluateProcessorVersionRequest.newBuilder() to construct.
  // Builder-based constructor used by Builder.build()/buildPartial().
  private EvaluateProcessorVersionRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initialising string fields to proto3 defaults.
  private EvaluateProcessorVersionRequest() {
    processorVersion_ = "";
  }
  // Called by the protobuf runtime to create fresh instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new EvaluateProcessorVersionRequest();
  }
  // Returns the proto descriptor for this message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_EvaluateProcessorVersionRequest_descriptor;
  }
  // Maps descriptor fields to the generated accessors for reflection support.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_EvaluateProcessorVersionRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest.class,
            com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest.Builder.class);
  }
  // Presence bits for optional message fields (bit 0 = evaluation_documents).
  private int bitField0_;
  public static final int PROCESSOR_VERSION_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  // Holds either a String or a lazily-decoded ByteString (standard protobuf caching).
  private volatile java.lang.Object processorVersion_ = "";

  /**
   *
   *
   * <pre>
   * Required. The resource name of the
   * [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] to
   * evaluate.
   * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`
   * </pre>
   *
   * <code>
   * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The processorVersion.
   */
  @java.lang.Override
  public java.lang.String getProcessorVersion() {
    java.lang.Object ref = processorVersion_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 ByteString once and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      processorVersion_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The resource name of the
* [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] to
* evaluate.
* `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`
* </pre>
*
* <code>
* string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for processorVersion.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getProcessorVersionBytes() {
    java.lang.Object ref = processorVersion_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to UTF-8 bytes once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      processorVersion_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int EVALUATION_DOCUMENTS_FIELD_NUMBER = 3;
  // Optional sub-message; presence is tracked via bitField0_ bit 0.
  private com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluationDocuments_;

  /**
   *
   *
   * <pre>
   * Optional. The documents used in the evaluation. If unspecified, use the
   * processor's dataset as evaluation input.
   * </pre>
   *
   * <code>
   * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the evaluationDocuments field is set.
   */
  @java.lang.Override
  public boolean hasEvaluationDocuments() {
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Optional. The documents used in the evaluation. If unspecified, use the
* processor's dataset as evaluation input.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The evaluationDocuments.
*/
  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig getEvaluationDocuments() {
    // Returns the default instance (never null) when the field is unset.
    return evaluationDocuments_ == null
        ? com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.getDefaultInstance()
        : evaluationDocuments_;
  }
/**
*
*
* <pre>
* Optional. The documents used in the evaluation. If unspecified, use the
* processor's dataset as evaluation input.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfigOrBuilder
      getEvaluationDocumentsOrBuilder() {
    // Same null-safe behaviour as getEvaluationDocuments(), typed as the OrBuilder view.
    return evaluationDocuments_ == null
        ? com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.getDefaultInstance()
        : evaluationDocuments_;
  }
  // -1 = unknown, 0 = not initialized, 1 = initialized (memoized result).
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields (field 1: processor_version, field 3: evaluation_documents)
  // followed by any unknown fields, in field-number order.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(processorVersion_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, processorVersion_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getEvaluationDocuments());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the wire size; mirrors the field order in writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(processorVersion_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, processorVersion_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getEvaluationDocuments());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality over all fields, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest other =
        (com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest) obj;
    if (!getProcessorVersion().equals(other.getProcessorVersion())) return false;
    if (hasEvaluationDocuments() != other.hasEvaluationDocuments()) return false;
    if (hasEvaluationDocuments()) {
      if (!getEvaluationDocuments().equals(other.getEvaluationDocuments())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); mixes field numbers with field values.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PROCESSOR_VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getProcessorVersion().hashCode();
    if (hasEvaluationDocuments()) {
      hash = (37 * hash) + EVALUATION_DOCUMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getEvaluationDocuments().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads delegating to PARSER,
  // covering ByteBuffer, ByteString, byte[], InputStream and CodedInputStream inputs.
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builder, builder pre-populated from a prototype,
  // and conversion of this message back into a builder.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a plain empty builder; otherwise copy this message in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Evaluates the given
* [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] against
* the supplied documents.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest)
com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequestOrBuilder {
    // Same descriptor/accessor plumbing as the enclosing message, exposed on the Builder.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_EvaluateProcessorVersionRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_EvaluateProcessorVersionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest.class,
              com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested-field builders only when the runtime requires it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getEvaluationDocumentsFieldBuilder();
      }
    }
    // Resets all fields (and presence bits) to their proto3 defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      processorVersion_ = "";
      evaluationDocuments_ = null;
      if (evaluationDocumentsBuilder_ != null) {
        evaluationDocumentsBuilder_.dispose();
        evaluationDocumentsBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_EvaluateProcessorVersionRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
        getDefaultInstanceForType() {
      return com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
          .getDefaultInstance();
    }
    // Builds and validates; throws if the result is uninitialized (never here: no required fields).
    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest build() {
      com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest buildPartial() {
      com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest result =
          new com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set builder fields into the result, translating builder presence bits
    // (bit 0 = processor_version, bit 1 = evaluation_documents) into message bits.
    private void buildPartial0(
        com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.processorVersion_ = processorVersion_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.evaluationDocuments_ =
            evaluationDocumentsBuilder_ == null
                ? evaluationDocuments_
                : evaluationDocumentsBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflection-based field mutators; these simply delegate to the superclass and
    // exist so the generated type is final-friendly for the protobuf runtime.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the strongly-typed merge when possible, else falls back to reflection.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest) {
        return mergeFrom(
            (com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: non-empty strings overwrite, set sub-messages merge recursively.
    public Builder mergeFrom(
        com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest other) {
      if (other
          == com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
              .getDefaultInstance()) return this;
      if (!other.getProcessorVersion().isEmpty()) {
        processorVersion_ = other.processorVersion_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasEvaluationDocuments()) {
        mergeEvaluationDocuments(other.getEvaluationDocuments());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder is always buildable.
      return true;
    }
    // Wire-format merge: tag 10 = field 1 (processor_version, length-delimited string),
    // tag 26 = field 3 (evaluation_documents, embedded message); unknown tags are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                processorVersion_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 26:
              {
                input.readMessage(
                    getEvaluationDocumentsFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder presence bits (bit 0 = processor_version, bit 1 = evaluation_documents).
    private int bitField0_;
    // String/ByteString dual-representation cache, as in the message class.
    private java.lang.Object processorVersion_ = "";

    /**
     *
     *
     * <pre>
     * Required. The resource name of the
     * [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] to
     * evaluate.
     * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`
     * </pre>
     *
     * <code>
     * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The processorVersion.
     */
    public java.lang.String getProcessorVersion() {
      java.lang.Object ref = processorVersion_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the UTF-8 ByteString once and cache the String for later calls.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        processorVersion_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The resource name of the
* [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] to
* evaluate.
* `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`
* </pre>
*
* <code>
* string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for processorVersion.
*/
public com.google.protobuf.ByteString getProcessorVersionBytes() {
java.lang.Object ref = processorVersion_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
processorVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the
* [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] to
* evaluate.
* `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`
* </pre>
*
* <code>
* string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The processorVersion to set.
* @return This builder for chaining.
*/
public Builder setProcessorVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
processorVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the
* [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] to
* evaluate.
* `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`
* </pre>
*
* <code>
* string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearProcessorVersion() {
processorVersion_ = getDefaultInstance().getProcessorVersion();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the
* [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion] to
* evaluate.
* `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`
* </pre>
*
* <code>
* string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for processorVersion to set.
* @return This builder for chaining.
*/
public Builder setProcessorVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
processorVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Plain field used until a nested builder is requested; once
// evaluationDocumentsBuilder_ is created it becomes the source of truth and
// this field is nulled out (see getEvaluationDocumentsFieldBuilder()).
private com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluationDocuments_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig,
        com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.Builder,
        com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfigOrBuilder>
    evaluationDocumentsBuilder_;
/**
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the evaluationDocuments field is set.
 */
public boolean hasEvaluationDocuments() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The evaluationDocuments.
 */
public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig getEvaluationDocuments() {
  if (evaluationDocumentsBuilder_ == null) {
    // Unset fields read as the default instance, never null.
    return evaluationDocuments_ == null
        ? com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.getDefaultInstance()
        : evaluationDocuments_;
  } else {
    return evaluationDocumentsBuilder_.getMessage();
  }
}
/**
 * Sets the evaluation_documents field to the given message.
 *
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setEvaluationDocuments(
    com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig value) {
  if (evaluationDocumentsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    evaluationDocuments_ = value;
  } else {
    evaluationDocumentsBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Sets the evaluation_documents field from the given sub-builder.
 *
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setEvaluationDocuments(
    com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.Builder builderForValue) {
  if (evaluationDocumentsBuilder_ == null) {
    evaluationDocuments_ = builderForValue.build();
  } else {
    evaluationDocumentsBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Merges {@code value} into the current evaluation_documents message; if the
 * field is unset (or still the default instance) the value replaces it.
 *
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder mergeEvaluationDocuments(
    com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig value) {
  if (evaluationDocumentsBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0)
        && evaluationDocuments_ != null
        && evaluationDocuments_
            != com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig
                .getDefaultInstance()) {
      getEvaluationDocumentsBuilder().mergeFrom(value);
    } else {
      evaluationDocuments_ = value;
    }
  } else {
    evaluationDocumentsBuilder_.mergeFrom(value);
  }
  if (evaluationDocuments_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 * Clears the evaluation_documents field back to its unset state.
 *
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder clearEvaluationDocuments() {
  bitField0_ = (bitField0_ & ~0x00000002);
  evaluationDocuments_ = null;
  if (evaluationDocumentsBuilder_ != null) {
    evaluationDocumentsBuilder_.dispose();
    evaluationDocumentsBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a mutable sub-builder for the evaluation_documents field, creating
 * it (and marking the field set) if necessary.
 *
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.Builder
    getEvaluationDocumentsBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getEvaluationDocumentsFieldBuilder().getBuilder();
}
/**
 * Read-only view of the evaluation_documents field without forcing builder
 * creation.
 *
 * <pre>
 * Optional. The documents used in the evaluation. If unspecified, use the
 * processor's dataset as evaluation input.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfigOrBuilder
    getEvaluationDocumentsOrBuilder() {
  if (evaluationDocumentsBuilder_ != null) {
    return evaluationDocumentsBuilder_.getMessageOrBuilder();
  } else {
    return evaluationDocuments_ == null
        ? com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.getDefaultInstance()
        : evaluationDocuments_;
  }
}
/**
 * Lazily creates the SingleFieldBuilderV3 for evaluation_documents. After
 * creation the plain field is nulled out; the builder owns the value.
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig,
        com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.Builder,
        com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfigOrBuilder>
    getEvaluationDocumentsFieldBuilder() {
  if (evaluationDocumentsBuilder_ == null) {
    evaluationDocumentsBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig,
            com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.Builder,
            com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfigOrBuilder>(
            getEvaluationDocuments(), getParentForChildren(), isClean());
    evaluationDocuments_ = null;
  }
  return evaluationDocumentsBuilder_;
}
// Unknown-field handling is delegated to the base Builder unchanged; these
// overrides exist only to narrow the return type for fluent chaining.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest)
// Shared immutable default instance; all unset message-typed fields of this
// type resolve to this singleton.
private static final com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest();
}

// Returns the singleton default (all-fields-unset) instance.
public static com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser for this message. Delegates to Builder.mergeFrom and, on
// failure, attaches the partially-built message to the thrown exception so
// callers can inspect what was parsed before the error.
private static final com.google.protobuf.Parser<EvaluateProcessorVersionRequest> PARSER =
    new com.google.protobuf.AbstractParser<EvaluateProcessorVersionRequest>() {
      @java.lang.Override
      public EvaluateProcessorVersionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a protobuf-typed exception.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<EvaluateProcessorVersionRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<EvaluateProcessorVersionRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.documentai.v1beta3.EvaluateProcessorVersionRequest
    getDefaultInstanceForType() {
  // Instance-level view of the static default singleton.
  return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Helpers to process Strings using regular expressions.
*
* @see java.util.regex.Pattern
* @since 3.8
*/
public class RegExUtils {
/**
 * The pattern used to split version strings on the {@code '.'} separator
 * (e.g. {@code "1.2.3"} into {@code ["1", "2", "3"]}). Compiled once and
 * cached, per the usual static-final Pattern idiom.
 */
static final Pattern VERSION_SPLIT_PATTERN = Pattern.compile("\\.");
/**
 * Compiles the given regular expression into a {@link Pattern} with the
 * {@link Pattern#DOTALL} flag enabled, so that {@code .} also matches line
 * terminators (single-line mode in Perl terminology).
 *
 * @param regex The expression to be compiled.
 * @return the compiled pattern with DOTALL mode on.
 * @since 3.13.0
 */
public static Pattern dotAll(final String regex) {
    // DOTALL is the only flag this helper applies.
    final int flags = Pattern.DOTALL;
    return Pattern.compile(regex, flags);
}
/**
 * Compiles {@code regex} in DOTALL (single-line) mode and returns a
 * {@link Matcher} ready to match it against {@code text}.
 *
 * @param regex The expression to be compiled.
 * @param text The character sequence to be matched.
 * @return A new matcher for the DOTALL-compiled pattern.
 * @since 3.18.0
 */
public static Matcher dotAllMatcher(final String regex, final CharSequence text) {
    // Equivalent to dotAll(regex).matcher(text), with the compilation inlined.
    final Pattern pattern = Pattern.compile(regex, Pattern.DOTALL);
    return pattern.matcher(text);
}
/**
 * Compiles {@code regex} in DOTALL (single-line) mode and returns a
 * {@link Matcher} ready to match it against {@code text}.
 *
 * @param regex The expression to be compiled.
 * @param text The character sequence to be matched.
 * @return A new matcher for the DOTALL-compiled pattern.
 * @since 3.13.0
 * @deprecated Use {@link #dotAllMatcher(String, CharSequence)}.
 */
@Deprecated
public static Matcher dotAllMatcher(final String regex, final String text) {
    // Equivalent to dotAll(regex).matcher(text), with the compilation inlined.
    final Pattern pattern = Pattern.compile(regex, Pattern.DOTALL);
    return pattern.matcher(text);
}
/**
 * Removes each substring of the text String that matches the given regular expression pattern.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code pattern.matcher(text).replaceAll(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.removeAll(null, *)      = null
 * StringUtils.removeAll("any", (Pattern) null)  = "any"
 * StringUtils.removeAll("any", Pattern.compile(""))    = "any"
 * StringUtils.removeAll("any", Pattern.compile(".*"))  = ""
 * StringUtils.removeAll("any", Pattern.compile(".+"))  = ""
 * StringUtils.removeAll("abc", Pattern.compile(".?"))  = ""
 * StringUtils.removeAll("A<__>\n<__>B", Pattern.compile("<.*>"))      = "A\nB"
 * StringUtils.removeAll("A<__>\n<__>B", Pattern.compile("(?s)<.*>"))  = "AB"
 * StringUtils.removeAll("A<__>\n<__>B", Pattern.compile("<.*>", Pattern.DOTALL))  = "AB"
 * StringUtils.removeAll("ABCabc123abc", Pattern.compile("[a-z]"))     = "ABC123"
 * }</pre>
 *
 * @param text  text to remove from, may be null.
 * @param regex  the regular expression to which this string is to be matched.
 * @return  the text with any removes processed,
 *              {@code null} if null String input.
 *
 * @see #replaceAll(CharSequence, Pattern, String)
 * @see java.util.regex.Matcher#replaceAll(String)
 * @see java.util.regex.Pattern
 * @since 3.18.0
 */
public static String removeAll(final CharSequence text, final Pattern regex) {
    // Removal is replacement with the empty string; null-safety is handled
    // by the replaceAll delegate.
    return replaceAll(text, regex, StringUtils.EMPTY);
}
/**
 * Removes each substring of the text String that matches the given regular expression pattern.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code pattern.matcher(text).replaceAll(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.removeAll(null, *)      = null
 * StringUtils.removeAll("any", (Pattern) null)  = "any"
 * StringUtils.removeAll("any", Pattern.compile(""))    = "any"
 * StringUtils.removeAll("any", Pattern.compile(".*"))  = ""
 * StringUtils.removeAll("any", Pattern.compile(".+"))  = ""
 * StringUtils.removeAll("abc", Pattern.compile(".?"))  = ""
 * StringUtils.removeAll("A<__>\n<__>B", Pattern.compile("<.*>"))      = "A\nB"
 * StringUtils.removeAll("A<__>\n<__>B", Pattern.compile("(?s)<.*>"))  = "AB"
 * StringUtils.removeAll("A<__>\n<__>B", Pattern.compile("<.*>", Pattern.DOTALL))  = "AB"
 * StringUtils.removeAll("ABCabc123abc", Pattern.compile("[a-z]"))     = "ABC123"
 * }</pre>
 *
 * @param text  text to remove from, may be null.
 * @param regex  the regular expression to which this string is to be matched
 * @return  the text with any removes processed,
 *              {@code null} if null String input.
 *
 * @see #replaceAll(CharSequence, Pattern, String)
 * @see java.util.regex.Matcher#replaceAll(String)
 * @see java.util.regex.Pattern
 * @deprecated Use {@link #removeAll(CharSequence, Pattern)}.
 */
@Deprecated
public static String removeAll(final String text, final Pattern regex) {
    // Cast disambiguates the overload; behavior is identical to the
    // CharSequence variant.
    return replaceAll((CharSequence) text, regex, StringUtils.EMPTY);
}
/**
 * Removes each substring of the text String that matches the given regular expression.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code text.replaceAll(regex, StringUtils.EMPTY)}</li>
 *  <li>{@code Pattern.compile(regex).matcher(text).replaceAll(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <p>Unlike in the {@link #removePattern(CharSequence, String)} method, the
 * {@link Pattern#DOTALL} option is NOT automatically added. To use the DOTALL
 * option prepend {@code "(?s)"} to the regex. DOTALL is also known as
 * single-line mode in Perl.</p>
 *
 * <pre>{@code
 * StringUtils.removeAll(null, *)      = null
 * StringUtils.removeAll("any", (String) null)  = "any"
 * StringUtils.removeAll("any", "")    = "any"
 * StringUtils.removeAll("any", ".*")  = ""
 * StringUtils.removeAll("any", ".+")  = ""
 * StringUtils.removeAll("abc", ".?")  = ""
 * StringUtils.removeAll("A<__>\n<__>B", "<.*>")      = "A\nB"
 * StringUtils.removeAll("A<__>\n<__>B", "(?s)<.*>")  = "AB"
 * StringUtils.removeAll("ABCabc123abc", "[a-z]")     = "ABC123"
 * }</pre>
 *
 * @param text  text to remove from, may be null
 * @param regex  the regular expression to which this string is to be matched
 * @return  the text with any removes processed,
 *              {@code null} if null String input.
 *
 * @throws java.util.regex.PatternSyntaxException
 *              if the regular expression's syntax is invalid.
 *
 * @see #replaceAll(String, String, String)
 * @see #removePattern(CharSequence, String)
 * @see String#replaceAll(String, String)
 * @see java.util.regex.Pattern
 * @see java.util.regex.Pattern#DOTALL
 */
public static String removeAll(final String text, final String regex) {
    // Null-safe: a null text or regex leaves the input untouched.
    if (text == null || regex == null) {
        return text;
    }
    // Removing a match is replacing it with the empty string.
    return text.replaceAll(regex, "");
}
/**
 * Removes the first substring of the text string that matches the given regular expression pattern.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code pattern.matcher(text).replaceFirst(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.removeFirst(null, *)      = null
 * StringUtils.removeFirst("any", (Pattern) null)  = "any"
 * StringUtils.removeFirst("any", Pattern.compile(""))    = "any"
 * StringUtils.removeFirst("any", Pattern.compile(".*"))  = ""
 * StringUtils.removeFirst("any", Pattern.compile(".+"))  = ""
 * StringUtils.removeFirst("abc", Pattern.compile(".?"))  = "bc"
 * StringUtils.removeFirst("A<__>\n<__>B", Pattern.compile("<.*>"))      = "A\n<__>B"
 * StringUtils.removeFirst("A<__>\n<__>B", Pattern.compile("(?s)<.*>"))  = "AB"
 * StringUtils.removeFirst("ABCabc123", Pattern.compile("[a-z]"))        = "ABCbc123"
 * StringUtils.removeFirst("ABCabc123abc", Pattern.compile("[a-z]+"))    = "ABC123abc"
 * }</pre>
 *
 * @param text  text to remove from, may be null.
 * @param regex  the regular expression pattern to which this string is to be matched.
 * @return  the text with the first replacement processed,
 *              {@code null} if null String input.
 *
 * @see #replaceFirst(String, Pattern, String)
 * @see java.util.regex.Matcher#replaceFirst(String)
 * @see java.util.regex.Pattern
 * @since 3.18.0
 */
public static String removeFirst(final CharSequence text, final Pattern regex) {
    // Removal is replacement of the first match with the empty string;
    // null-safety is handled by the replaceFirst delegate.
    return replaceFirst(text, regex, StringUtils.EMPTY);
}
/**
 * Removes the first substring of the text string that matches the given regular expression pattern.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code pattern.matcher(text).replaceFirst(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.removeFirst(null, *)      = null
 * StringUtils.removeFirst("any", (Pattern) null)  = "any"
 * StringUtils.removeFirst("any", Pattern.compile(""))    = "any"
 * StringUtils.removeFirst("any", Pattern.compile(".*"))  = ""
 * StringUtils.removeFirst("any", Pattern.compile(".+"))  = ""
 * StringUtils.removeFirst("abc", Pattern.compile(".?"))  = "bc"
 * StringUtils.removeFirst("A<__>\n<__>B", Pattern.compile("<.*>"))      = "A\n<__>B"
 * StringUtils.removeFirst("A<__>\n<__>B", Pattern.compile("(?s)<.*>"))  = "AB"
 * StringUtils.removeFirst("ABCabc123", Pattern.compile("[a-z]"))        = "ABCbc123"
 * StringUtils.removeFirst("ABCabc123abc", Pattern.compile("[a-z]+"))    = "ABC123abc"
 * }</pre>
 *
 * @param text  text to remove from, may be null.
 * @param regex  the regular expression pattern to which this string is to be matched.
 * @return  the text with the first replacement processed,
 *              {@code null} if null String input.
 *
 * @see #replaceFirst(String, Pattern, String)
 * @see java.util.regex.Matcher#replaceFirst(String)
 * @see java.util.regex.Pattern
 * @deprecated Use {@link #removeFirst(CharSequence, Pattern)}.
 */
@Deprecated
public static String removeFirst(final String text, final Pattern regex) {
    // Identical to the CharSequence variant; kept for binary compatibility.
    return replaceFirst(text, regex, StringUtils.EMPTY);
}
/**
 * Removes the first substring of the text string that matches the given regular expression.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code text.replaceFirst(regex, StringUtils.EMPTY)}</li>
 *  <li>{@code Pattern.compile(regex).matcher(text).replaceFirst(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <p>The {@link Pattern#DOTALL} option is NOT automatically added. To use the
 * DOTALL option prepend {@code "(?s)"} to the regex. DOTALL is also known as
 * single-line mode in Perl.</p>
 *
 * <pre>{@code
 * StringUtils.removeFirst(null, *)      = null
 * StringUtils.removeFirst("any", (String) null)  = "any"
 * StringUtils.removeFirst("any", "")    = "any"
 * StringUtils.removeFirst("any", ".*")  = ""
 * StringUtils.removeFirst("any", ".+")  = ""
 * StringUtils.removeFirst("abc", ".?")  = "bc"
 * StringUtils.removeFirst("A<__>\n<__>B", "<.*>")      = "A\n<__>B"
 * StringUtils.removeFirst("A<__>\n<__>B", "(?s)<.*>")  = "AB"
 * StringUtils.removeFirst("ABCabc123", "[a-z]")        = "ABCbc123"
 * StringUtils.removeFirst("ABCabc123abc", "[a-z]+")    = "ABC123abc"
 * }</pre>
 *
 * @param text  text to remove from, may be null.
 * @param regex  the regular expression to which this string is to be matched.
 * @return  the text with the first replacement processed,
 *              {@code null} if null String input.
 *
 * @throws java.util.regex.PatternSyntaxException
 *              if the regular expression's syntax is invalid.
 *
 * @see #replaceFirst(String, String, String)
 * @see String#replaceFirst(String, String)
 * @see java.util.regex.Pattern
 * @see java.util.regex.Pattern#DOTALL
 */
public static String removeFirst(final String text, final String regex) {
    // Removal is replacement of the first match with the empty string;
    // null-safety is handled by the replaceFirst delegate.
    return replaceFirst(text, regex, StringUtils.EMPTY);
}
/**
 * Removes each substring of the source String that matches the given regular expression using the DOTALL option.
 *
 * This call is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code text.replaceAll("(?s)" + regex, StringUtils.EMPTY)}</li>
 *  <li>{@code Pattern.compile(regex, Pattern.DOTALL).matcher(text).replaceAll(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.removePattern(null, *)       = null
 * StringUtils.removePattern("any", (String) null)   = "any"
 * StringUtils.removePattern("A<__>\n<__>B", "<.*>") = "AB"
 * StringUtils.removePattern("ABCabc123", "[a-z]")   = "ABC123"
 * }</pre>
 *
 * @param text
 *            the source string.
 * @param regex
 *            the regular expression to which this string is to be matched.
 * @return The resulting {@link String}.
 * @see #replacePattern(CharSequence, String, String)
 * @see String#replaceAll(String, String)
 * @see Pattern#DOTALL
 * @since 3.18.0
 */
public static String removePattern(final CharSequence text, final String regex) {
    // DOTALL compilation and null-safety are handled by the replacePattern
    // delegate; removal is replacement with the empty string.
    return replacePattern(text, regex, StringUtils.EMPTY);
}
/**
 * Removes each substring of the source String that matches the given regular expression using the DOTALL option.
 *
 * This call is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code text.replaceAll("(?s)" + regex, StringUtils.EMPTY)}</li>
 *  <li>{@code Pattern.compile(regex, Pattern.DOTALL).matcher(text).replaceAll(StringUtils.EMPTY)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.removePattern(null, *)       = null
 * StringUtils.removePattern("any", (String) null)   = "any"
 * StringUtils.removePattern("A<__>\n<__>B", "<.*>") = "AB"
 * StringUtils.removePattern("ABCabc123", "[a-z]")   = "ABC123"
 * }</pre>
 *
 * @param text
 *            the source string.
 * @param regex
 *            the regular expression to which this string is to be matched.
 * @return The resulting {@link String}.
 * @see #replacePattern(CharSequence, String, String)
 * @see String#replaceAll(String, String)
 * @see Pattern#DOTALL
 * @deprecated Use {@link #removePattern(CharSequence, String)}.
 */
@Deprecated
public static String removePattern(final String text, final String regex) {
    // Cast disambiguates the overload; behavior is identical to the
    // CharSequence variant.
    return replacePattern((CharSequence) text, regex, StringUtils.EMPTY);
}
/**
 * Replaces each substring of the text String that matches the given regular expression pattern with the given replacement.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code pattern.matcher(text).replaceAll(replacement)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.replaceAll(null, *, *)       = null
 * StringUtils.replaceAll("any", (Pattern) null, *)   = "any"
 * StringUtils.replaceAll("any", *, null)   = "any"
 * StringUtils.replaceAll("", Pattern.compile(""), "zzz")    = "zzz"
 * StringUtils.replaceAll("", Pattern.compile(".*"), "zzz")  = "zzz"
 * StringUtils.replaceAll("", Pattern.compile(".+"), "zzz")  = ""
 * StringUtils.replaceAll("abc", Pattern.compile(""), "ZZ")  = "ZZaZZbZZcZZ"
 * StringUtils.replaceAll("<__>\n<__>", Pattern.compile("<.*>"), "z")                 = "z\nz"
 * StringUtils.replaceAll("<__>\n<__>", Pattern.compile("<.*>", Pattern.DOTALL), "z") = "z"
 * StringUtils.replaceAll("<__>\n<__>", Pattern.compile("(?s)<.*>"), "z")             = "z"
 * StringUtils.replaceAll("ABCabc123", Pattern.compile("[a-z]"), "_")                 = "ABC___123"
 * StringUtils.replaceAll("ABCabc123", Pattern.compile("[^A-Z0-9]+"), "_")            = "ABC_123"
 * StringUtils.replaceAll("ABCabc123", Pattern.compile("[^A-Z0-9]+"), "")             = "ABC123"
 * StringUtils.replaceAll("Lorem ipsum  dolor   sit", Pattern.compile("( +)([a-z]+)"), "_$2")  = "Lorem_ipsum_dolor_sit"
 * }</pre>
 *
 * @param text  text to search and replace in, may be null.
 * @param regex  the regular expression pattern to which this string is to be matched.
 * @param replacement  the string to be substituted for each match.
 * @return  the text with any replacements processed,
 *              {@code null} if null String input.
 * @see java.util.regex.Matcher#replaceAll(String)
 * @see java.util.regex.Pattern
 */
public static String replaceAll(final CharSequence text, final Pattern regex, final String replacement) {
    // Null-safe: any null argument means no replacement is attempted and
    // the input is returned as a String (null stays null).
    if (text == null || regex == null || replacement == null) {
        return toStringOrNull(text);
    }
    return regex.matcher(text).replaceAll(replacement);
}
/**
 * Replaces each substring of the text String that matches the given regular expression pattern with the given replacement.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code pattern.matcher(text).replaceAll(replacement)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <pre>{@code
 * StringUtils.replaceAll(null, *, *)       = null
 * StringUtils.replaceAll("any", (Pattern) null, *)   = "any"
 * StringUtils.replaceAll("any", *, null)   = "any"
 * StringUtils.replaceAll("", Pattern.compile(""), "zzz")    = "zzz"
 * StringUtils.replaceAll("", Pattern.compile(".*"), "zzz")  = "zzz"
 * StringUtils.replaceAll("", Pattern.compile(".+"), "zzz")  = ""
 * StringUtils.replaceAll("abc", Pattern.compile(""), "ZZ")  = "ZZaZZbZZcZZ"
 * StringUtils.replaceAll("<__>\n<__>", Pattern.compile("<.*>"), "z")                 = "z\nz"
 * StringUtils.replaceAll("<__>\n<__>", Pattern.compile("<.*>", Pattern.DOTALL), "z") = "z"
 * StringUtils.replaceAll("<__>\n<__>", Pattern.compile("(?s)<.*>"), "z")             = "z"
 * StringUtils.replaceAll("ABCabc123", Pattern.compile("[a-z]"), "_")                 = "ABC___123"
 * StringUtils.replaceAll("ABCabc123", Pattern.compile("[^A-Z0-9]+"), "_")            = "ABC_123"
 * StringUtils.replaceAll("ABCabc123", Pattern.compile("[^A-Z0-9]+"), "")             = "ABC123"
 * StringUtils.replaceAll("Lorem ipsum  dolor   sit", Pattern.compile("( +)([a-z]+)"), "_$2")  = "Lorem_ipsum_dolor_sit"
 * }</pre>
 *
 * @param text  text to search and replace in, may be null.
 * @param regex  the regular expression pattern to which this string is to be matched.
 * @param replacement  the string to be substituted for each match.
 * @return  the text with any replacements processed,
 *              {@code null} if null String input.
 * @see java.util.regex.Matcher#replaceAll(String)
 * @see java.util.regex.Pattern
 * @deprecated Use {@link #replaceAll(CharSequence, Pattern, String)}.
 */
@Deprecated
public static String replaceAll(final String text, final Pattern regex, final String replacement) {
    // Cast disambiguates the overload; behavior is identical to the
    // CharSequence variant.
    return replaceAll((CharSequence) text, regex, replacement);
}
/**
 * Replaces each substring of the text String that matches the given regular expression
 * with the given replacement.
 *
 * This method is a {@code null} safe equivalent to:
 * <ul>
 *  <li>{@code text.replaceAll(regex, replacement)}</li>
 *  <li>{@code Pattern.compile(regex).matcher(text).replaceAll(replacement)}</li>
 * </ul>
 *
 * <p>A {@code null} reference passed to this method is a no-op.</p>
 *
 * <p>Unlike in the {@link #replacePattern(CharSequence, String, String)} method, the
 * {@link Pattern#DOTALL} option is NOT automatically added. To use the DOTALL option
 * prepend {@code "(?s)"} to the regex. DOTALL is also known as single-line mode in
 * Perl.</p>
 *
 * <pre>{@code
 * StringUtils.replaceAll(null, *, *)       = null
 * StringUtils.replaceAll("any", (String) null, *)   = "any"
 * StringUtils.replaceAll("any", *, null)   = "any"
 * StringUtils.replaceAll("", "", "zzz")    = "zzz"
 * StringUtils.replaceAll("", ".*", "zzz")  = "zzz"
 * StringUtils.replaceAll("", ".+", "zzz")  = ""
 * StringUtils.replaceAll("abc", "", "ZZ")  = "ZZaZZbZZcZZ"
 * StringUtils.replaceAll("<__>\n<__>", "<.*>", "z")      = "z\nz"
 * StringUtils.replaceAll("<__>\n<__>", "(?s)<.*>", "z")  = "z"
 * StringUtils.replaceAll("ABCabc123", "[a-z]", "_")      = "ABC___123"
 * StringUtils.replaceAll("ABCabc123", "[^A-Z0-9]+", "_") = "ABC_123"
 * StringUtils.replaceAll("ABCabc123", "[^A-Z0-9]+", "")  = "ABC123"
 * StringUtils.replaceAll("Lorem ipsum  dolor   sit", "( +)([a-z]+)", "_$2")  = "Lorem_ipsum_dolor_sit"
 * }</pre>
 *
 * @param text  text to search and replace in, may be null.
 * @param regex  the regular expression to which this string is to be matched.
 * @param replacement  the string to be substituted for each match.
 * @return  the text with any replacements processed,
 *              {@code null} if null String input.
 * @throws java.util.regex.PatternSyntaxException
 *              if the regular expression's syntax is invalid.
 * @see #replacePattern(String, String, String)
 * @see String#replaceAll(String, String)
 * @see java.util.regex.Pattern
 * @see java.util.regex.Pattern#DOTALL
 */
public static String replaceAll(final String text, final String regex, final String replacement) {
    // Null-safe: any null argument leaves the input untouched.
    if (text == null || regex == null || replacement == null) {
        return text;
    }
    return text.replaceAll(regex, replacement);
}
/**
* Replaces the first substring of the text string that matches the given regular expression pattern
* with the given replacement.
*
* This method is a {@code null} safe equivalent to:
* <ul>
* <li>{@code pattern.matcher(text).replaceFirst(replacement)}</li>
* </ul>
*
* <p>A {@code null} reference passed to this method is a no-op.</p>
*
* <pre>{@code
* StringUtils.replaceFirst(null, *, *) = null
* StringUtils.replaceFirst("any", (Pattern) null, *) = "any"
* StringUtils.replaceFirst("any", *, null) = "any"
* StringUtils.replaceFirst("", Pattern.compile(""), "zzz") = "zzz"
* StringUtils.replaceFirst("", Pattern.compile(".*"), "zzz") = "zzz"
* StringUtils.replaceFirst("", Pattern.compile(".+"), "zzz") = ""
* StringUtils.replaceFirst("abc", Pattern.compile(""), "ZZ") = "ZZabc"
* StringUtils.replaceFirst("<__>\n<__>", Pattern.compile("<.*>"), "z") = "z\n<__>"
* StringUtils.replaceFirst("<__>\n<__>", Pattern.compile("(?s)<.*>"), "z") = "z"
* StringUtils.replaceFirst("ABCabc123", Pattern.compile("[a-z]"), "_") = "ABC_bc123"
* StringUtils.replaceFirst("ABCabc123abc", Pattern.compile("[^A-Z0-9]+"), "_") = "ABC_123abc"
* StringUtils.replaceFirst("ABCabc123abc", Pattern.compile("[^A-Z0-9]+"), "") = "ABC123abc"
* StringUtils.replaceFirst("Lorem ipsum dolor sit", Pattern.compile("( +)([a-z]+)"), "_$2") = "Lorem_ipsum dolor sit"
* }</pre>
*
* @param text text to search and replace in, may be null.
* @param regex the regular expression pattern to which this string is to be matched.
* @param replacement the string to be substituted for the first match
* @return the text with the first replacement processed,
* {@code null} if null String input.
* @see java.util.regex.Matcher#replaceFirst(String)
* @see java.util.regex.Pattern
* @since 3.18.0
*/
public static String replaceFirst(final CharSequence text, final Pattern regex, final String replacement) {
if (text == null || regex == null || replacement == null) {
return toStringOrNull(text);
}
return regex.matcher(text).replaceFirst(replacement);
}
/**
* Replaces the first substring of the text string that matches the given regular expression pattern
* with the given replacement.
*
* This method is a {@code null} safe equivalent to:
* <ul>
* <li>{@code pattern.matcher(text).replaceFirst(replacement)}</li>
* </ul>
*
* <p>A {@code null} reference passed to this method is a no-op.</p>
*
* <pre>{@code
* StringUtils.replaceFirst(null, *, *) = null
* StringUtils.replaceFirst("any", (Pattern) null, *) = "any"
* StringUtils.replaceFirst("any", *, null) = "any"
* StringUtils.replaceFirst("", Pattern.compile(""), "zzz") = "zzz"
* StringUtils.replaceFirst("", Pattern.compile(".*"), "zzz") = "zzz"
* StringUtils.replaceFirst("", Pattern.compile(".+"), "zzz") = ""
* StringUtils.replaceFirst("abc", Pattern.compile(""), "ZZ") = "ZZabc"
* StringUtils.replaceFirst("<__>\n<__>", Pattern.compile("<.*>"), "z") = "z\n<__>"
* StringUtils.replaceFirst("<__>\n<__>", Pattern.compile("(?s)<.*>"), "z") = "z"
* StringUtils.replaceFirst("ABCabc123", Pattern.compile("[a-z]"), "_") = "ABC_bc123"
* StringUtils.replaceFirst("ABCabc123abc", Pattern.compile("[^A-Z0-9]+"), "_") = "ABC_123abc"
* StringUtils.replaceFirst("ABCabc123abc", Pattern.compile("[^A-Z0-9]+"), "") = "ABC123abc"
* StringUtils.replaceFirst("Lorem ipsum dolor sit", Pattern.compile("( +)([a-z]+)"), "_$2") = "Lorem_ipsum dolor sit"
* }</pre>
*
* @param text text to search and replace in, may be null.
* @param regex the regular expression pattern to which this string is to be matched.
* @param replacement the string to be substituted for the first match.
* @return the text with the first replacement processed,
* {@code null} if null String input.
* @see java.util.regex.Matcher#replaceFirst(String)
* @see java.util.regex.Pattern
* @deprecated Use {@link #replaceFirst(CharSequence, Pattern, String)}.
*/
@Deprecated
public static String replaceFirst(final String text, final Pattern regex, final String replacement) {
return replaceFirst((CharSequence) text, regex, replacement);
}
/**
* Replaces the first substring of the text string that matches the given regular expression
* with the given replacement.
*
* This method is a {@code null} safe equivalent to:
* <ul>
* <li>{@code text.replaceFirst(regex, replacement)}</li>
* <li>{@code Pattern.compile(regex).matcher(text).replaceFirst(replacement)}</li>
* </ul>
*
* <p>A {@code null} reference passed to this method is a no-op.</p>
*
* <p>The {@link Pattern#DOTALL} option is NOT automatically added.
* To use the DOTALL option prepend {@code "(?s)"} to the regex.
* DOTALL is also known as single-line mode in Perl.</p>
*
* <pre>{@code
* StringUtils.replaceFirst(null, *, *) = null
* StringUtils.replaceFirst("any", (String) null, *) = "any"
* StringUtils.replaceFirst("any", *, null) = "any"
* StringUtils.replaceFirst("", "", "zzz") = "zzz"
* StringUtils.replaceFirst("", ".*", "zzz") = "zzz"
* StringUtils.replaceFirst("", ".+", "zzz") = ""
* StringUtils.replaceFirst("abc", "", "ZZ") = "ZZabc"
* StringUtils.replaceFirst("<__>\n<__>", "<.*>", "z") = "z\n<__>"
* StringUtils.replaceFirst("<__>\n<__>", "(?s)<.*>", "z") = "z"
* StringUtils.replaceFirst("ABCabc123", "[a-z]", "_") = "ABC_bc123"
* StringUtils.replaceFirst("ABCabc123abc", "[^A-Z0-9]+", "_") = "ABC_123abc"
* StringUtils.replaceFirst("ABCabc123abc", "[^A-Z0-9]+", "") = "ABC123abc"
* StringUtils.replaceFirst("Lorem ipsum dolor sit", "( +)([a-z]+)", "_$2") = "Lorem_ipsum dolor sit"
* }</pre>
*
* @param text text to search and replace in, may be null.
* @param regex the regular expression to which this string is to be matched.
* @param replacement the string to be substituted for the first match.
* @return the text with the first replacement processed,
* {@code null} if null String input.
* @throws java.util.regex.PatternSyntaxException
* if the regular expression's syntax is invalid.
* @see String#replaceFirst(String, String)
* @see java.util.regex.Pattern
* @see java.util.regex.Pattern#DOTALL
*/
public static String replaceFirst(final String text, final String regex, final String replacement) {
if (text == null || regex == null || replacement == null) {
return text;
}
return text.replaceFirst(regex, replacement);
}
/**
* Replaces each substring of the source String that matches the given regular expression with the given
* replacement using the {@link Pattern#DOTALL} option. DOTALL is also known as single-line mode in Perl.
*
* This call is a {@code null} safe equivalent to:
* <ul>
* <li>{@code text.replaceAll("(?s)" + regex, replacement)}</li>
* <li>{@code Pattern.compile(regex, Pattern.DOTALL).matcher(text).replaceAll(replacement)}</li>
* </ul>
*
* <p>A {@code null} reference passed to this method is a no-op.</p>
*
* <pre>{@code
* StringUtils.replacePattern(null, *, *) = null
* StringUtils.replacePattern("any", (String) null, *) = "any"
* StringUtils.replacePattern("any", *, null) = "any"
* StringUtils.replacePattern("", "", "zzz") = "zzz"
* StringUtils.replacePattern("", ".*", "zzz") = "zzz"
* StringUtils.replacePattern("", ".+", "zzz") = ""
* StringUtils.replacePattern("<__>\n<__>", "<.*>", "z") = "z"
* StringUtils.replacePattern("ABCabc123", "[a-z]", "_") = "ABC___123"
* StringUtils.replacePattern("ABCabc123", "[^A-Z0-9]+", "_") = "ABC_123"
* StringUtils.replacePattern("ABCabc123", "[^A-Z0-9]+", "") = "ABC123"
* StringUtils.replacePattern("Lorem ipsum dolor sit", "( +)([a-z]+)", "_$2") = "Lorem_ipsum_dolor_sit"
* }</pre>
*
* @param text
* the source string.
* @param regex
* the regular expression to which this string is to be matched.
* @param replacement
* the string to be substituted for each match.
* @return The resulting {@link String}.
* @see #replaceAll(String, String, String)
* @see String#replaceAll(String, String)
* @see Pattern#DOTALL
* @since 3.18.0
*/
public static String replacePattern(final CharSequence text, final String regex, final String replacement) {
if (ObjectUtils.anyNull(text, regex, replacement)) {
return toStringOrNull(text);
}
return dotAllMatcher(regex, text).replaceAll(replacement);
}
/**
* Replaces each substring of the source String that matches the given regular expression with the given
* replacement using the {@link Pattern#DOTALL} option. DOTALL is also known as single-line mode in Perl.
*
* This call is a {@code null} safe equivalent to:
* <ul>
* <li>{@code text.replaceAll("(?s)" + regex, replacement)}</li>
* <li>{@code Pattern.compile(regex, Pattern.DOTALL).matcher(text).replaceAll(replacement)}</li>
* </ul>
*
* <p>A {@code null} reference passed to this method is a no-op.</p>
*
* <pre>{@code
* StringUtils.replacePattern(null, *, *) = null
* StringUtils.replacePattern("any", (String) null, *) = "any"
* StringUtils.replacePattern("any", *, null) = "any"
* StringUtils.replacePattern("", "", "zzz") = "zzz"
* StringUtils.replacePattern("", ".*", "zzz") = "zzz"
* StringUtils.replacePattern("", ".+", "zzz") = ""
* StringUtils.replacePattern("<__>\n<__>", "<.*>", "z") = "z"
* StringUtils.replacePattern("ABCabc123", "[a-z]", "_") = "ABC___123"
* StringUtils.replacePattern("ABCabc123", "[^A-Z0-9]+", "_") = "ABC_123"
* StringUtils.replacePattern("ABCabc123", "[^A-Z0-9]+", "") = "ABC123"
* StringUtils.replacePattern("Lorem ipsum dolor sit", "( +)([a-z]+)", "_$2") = "Lorem_ipsum_dolor_sit"
* }</pre>
*
* @param text
* the source string.
* @param regex
* the regular expression to which this string is to be matched.
* @param replacement
* the string to be substituted for each match.
* @return The resulting {@link String}.
* @see #replaceAll(String, String, String)
* @see String#replaceAll(String, String)
* @see Pattern#DOTALL
* @deprecated Use {@link #replacePattern(CharSequence, String, String)}.
*/
@Deprecated
public static String replacePattern(final String text, final String regex, final String replacement) {
return replacePattern((CharSequence) text, regex, replacement);
}
private static String toStringOrNull(final CharSequence text) {
return Objects.toString(text, null);
}
    /**
     * Public no-argument constructor. This class is intended to be used statically;
     * the constructor is deprecated and will be made private in 4.0.
     *
     * @deprecated TODO Make private in 4.0.
     */
    @Deprecated
    public RegExUtils() {
        // empty
    }
}
|
googleapis/google-cloud-java | 35,090 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/IndexServiceProto.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/index_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
public final class IndexServiceProto {
  // Static-only holder for generated descriptor data; never instantiated.
  private IndexServiceProto() {}

  /** No-op: this generated file declares no protobuf extensions to register. */
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}

  /** Full-registry overload; delegates to the lite-registry variant (also a no-op here). */
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GetIndexRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GetIndexRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_Restrict_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_Restrict_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_NumericRestrict_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_NumericRestrict_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_BigQuerySourceConfig_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_BigQuerySourceConfig_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_DeleteIndexRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_DeleteIndexRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_RecordError_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_RecordError_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_ContentValidationStats_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_ContentValidationStats_fieldAccessorTable;
  /**
   * Returns the {@code FileDescriptor} for {@code index_service.proto}; the
   * {@code descriptor} field is assigned in this class's static initializer.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n3google/cloud/aiplatform/v1beta1/index_"
+ "service.proto\022\037google.cloud.aiplatform.v"
+ "1beta1\032\034google/api/annotations.proto\032\027go"
+ "ogle/api/client.proto\032\037google/api/field_"
+ "behavior.proto\032\031google/api/resource.prot"
+ "o\032+google/cloud/aiplatform/v1beta1/index"
+ ".proto\032/google/cloud/aiplatform/v1beta1/"
+ "operation.proto\032#google/longrunning/oper"
+ "ations.proto\032\033google/protobuf/empty.prot"
+ "o\032 google/protobuf/field_mask.proto\"\213\001\n\022"
+ "CreateIndexRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372"
+ "A#\n!locations.googleapis.com/Location\022:\n"
+ "\005index\030\002 \001(\0132&.google.cloud.aiplatform.v"
+ "1beta1.IndexB\003\340A\002\"\360\001\n\034CreateIndexOperati"
+ "onMetadata\022S\n\020generic_metadata\030\001 \001(\01329.g"
+ "oogle.cloud.aiplatform.v1beta1.GenericOp"
+ "erationMetadata\022{\n*nearest_neighbor_sear"
+ "ch_operation_metadata\030\002 \001(\0132G.google.clo"
+ "ud.aiplatform.v1beta1.NearestNeighborSea"
+ "rchOperationMetadata\"H\n\017GetIndexRequest\022"
+ "5\n\004name\030\001 \001(\tB\'\340A\002\372A!\n\037aiplatform.google"
+ "apis.com/Index\"\265\001\n\022ListIndexesRequest\0229\n"
+ "\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googlea"
+ "pis.com/Location\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage"
+ "_size\030\003 \001(\005\022\022\n\npage_token\030\004 \001(\t\022-\n\tread_"
+ "mask\030\005 \001(\0132\032.google.protobuf.FieldMask\"g"
+ "\n\023ListIndexesResponse\0227\n\007indexes\030\001 \003(\0132&"
+ ".google.cloud.aiplatform.v1beta1.Index\022\027"
+ "\n\017next_page_token\030\002 \001(\t\"\201\001\n\022UpdateIndexR"
+ "equest\022:\n\005index\030\001 \001(\0132&.google.cloud.aip"
+ "latform.v1beta1.IndexB\003\340A\002\022/\n\013update_mas"
+ "k\030\002 \001(\0132\032.google.protobuf.FieldMask\"\360\001\n\034"
+ "UpdateIndexOperationMetadata\022S\n\020generic_"
+ "metadata\030\001 \001(\01329.google.cloud.aiplatform"
+ ".v1beta1.GenericOperationMetadata\022{\n*nea"
+ "rest_neighbor_search_operation_metadata\030"
+ "\002 \001(\0132G.google.cloud.aiplatform.v1beta1."
+ "NearestNeighborSearchOperationMetadata\"\207"
+ "\n\n\022ImportIndexRequest\0225\n\004name\030\001 \001(\tB\'\340A\002"
+ "\372A!\n\037aiplatform.googleapis.com/Index\022\"\n\025"
+ "is_complete_overwrite\030\002 \001(\010B\003\340A\001\022X\n\006conf"
+ "ig\030\003 \001(\0132C.google.cloud.aiplatform.v1bet"
+ "a1.ImportIndexRequest.ConnectorConfigB\003\340"
+ "A\002\032\273\010\n\017ConnectorConfig\022{\n\027big_query_sour"
+ "ce_config\030\001 \001(\0132X.google.cloud.aiplatfor"
+ "m.v1beta1.ImportIndexRequest.ConnectorCo"
+ "nfig.BigQuerySourceConfigH\000\032\355\005\n\025Datapoin"
+ "tFieldMapping\022\026\n\tid_column\030\001 \001(\tB\003\340A\002\022\035\n"
+ "\020embedding_column\030\002 \001(\tB\003\340A\002\022z\n\trestrict"
+ "s\030\003 \003(\0132b.google.cloud.aiplatform.v1beta"
+ "1.ImportIndexRequest.ConnectorConfig.Dat"
+ "apointFieldMapping.RestrictB\003\340A\001\022\211\001\n\021num"
+ "eric_restricts\030\004 \003(\0132i.google.cloud.aipl"
+ "atform.v1beta1.ImportIndexRequest.Connec"
+ "torConfig.DatapointFieldMapping.NumericR"
+ "estrictB\003\340A\001\022\035\n\020metadata_columns\030\005 \003(\tB\003"
+ "\340A\001\032W\n\010Restrict\022\026\n\tnamespace\030\001 \001(\tB\003\340A\002\022"
+ "\031\n\014allow_column\030\002 \003(\tB\003\340A\001\022\030\n\013deny_colum"
+ "n\030\003 \003(\tB\003\340A\001\032\234\002\n\017NumericRestrict\022\026\n\tname"
+ "space\030\001 \001(\tB\003\340A\002\022\031\n\014value_column\030\002 \001(\tB\003"
+ "\340A\001\022\214\001\n\nvalue_type\030\003 \001(\0162s.google.cloud."
+ "aiplatform.v1beta1.ImportIndexRequest.Co"
+ "nnectorConfig.DatapointFieldMapping.Nume"
+ "ricRestrict.ValueTypeB\003\340A\002\"G\n\tValueType\022"
+ "\032\n\026VALUE_TYPE_UNSPECIFIED\020\000\022\007\n\003INT\020\001\022\t\n\005"
+ "FLOAT\020\002\022\n\n\006DOUBLE\020\003\032\260\001\n\024BigQuerySourceCo"
+ "nfig\022\027\n\ntable_path\030\001 \001(\tB\003\340A\002\022\177\n\027datapoi"
+ "nt_field_mapping\030\002 \001(\0132Y.google.cloud.ai"
+ "platform.v1beta1.ImportIndexRequest.Conn"
+ "ectorConfig.DatapointFieldMappingB\003\340A\002B\010"
+ "\n\006source\"s\n\034ImportIndexOperationMetadata"
+ "\022S\n\020generic_metadata\030\001 \001(\01329.google.clou"
+ "d.aiplatform.v1beta1.GenericOperationMet"
+ "adata\"K\n\022DeleteIndexRequest\0225\n\004name\030\001 \001("
+ "\tB\'\340A\002\372A!\n\037aiplatform.googleapis.com/Ind"
+ "ex\"\314\001\n\027UpsertDatapointsRequest\0226\n\005index\030"
+ "\001 \001(\tB\'\340A\002\372A!\n\037aiplatform.googleapis.com"
+ "/Index\022C\n\ndatapoints\030\002 \003(\0132/.google.clou"
+ "d.aiplatform.v1beta1.IndexDatapoint\0224\n\013u"
+ "pdate_mask\030\003 \001(\0132\032.google.protobuf.Field"
+ "MaskB\003\340A\001\"\032\n\030UpsertDatapointsResponse\"h\n"
+ "\027RemoveDatapointsRequest\0226\n\005index\030\001 \001(\tB"
+ "\'\340A\002\372A!\n\037aiplatform.googleapis.com/Index"
+ "\022\025\n\rdatapoint_ids\030\002 \003(\t\"\032\n\030RemoveDatapoi"
+ "ntsResponse\"\340\t\n&NearestNeighborSearchOpe"
+ "rationMetadata\022\200\001\n\030content_validation_st"
+ "ats\030\001 \003(\0132^.google.cloud.aiplatform.v1be"
+ "ta1.NearestNeighborSearchOperationMetada"
+ "ta.ContentValidationStats\022\030\n\020data_bytes_"
+ "count\030\002 \001(\003\032\366\005\n\013RecordError\022w\n\nerror_typ"
+ "e\030\001 \001(\0162c.google.cloud.aiplatform.v1beta"
+ "1.NearestNeighborSearchOperationMetadata"
+ ".RecordError.RecordErrorType\022\025\n\rerror_me"
+ "ssage\030\002 \001(\t\022\026\n\016source_gcs_uri\030\003 \001(\t\022\024\n\014e"
+ "mbedding_id\030\004 \001(\t\022\022\n\nraw_record\030\005 \001(\t\"\224\004"
+ "\n\017RecordErrorType\022\032\n\026ERROR_TYPE_UNSPECIF"
+ "IED\020\000\022\016\n\nEMPTY_LINE\020\001\022\027\n\023INVALID_JSON_SY"
+ "NTAX\020\002\022\026\n\022INVALID_CSV_SYNTAX\020\003\022\027\n\023INVALI"
+ "D_AVRO_SYNTAX\020\004\022\030\n\024INVALID_EMBEDDING_ID\020"
+ "\005\022\033\n\027EMBEDDING_SIZE_MISMATCH\020\006\022\025\n\021NAMESP"
+ "ACE_MISSING\020\007\022\021\n\rPARSING_ERROR\020\010\022\027\n\023DUPL"
+ "ICATE_NAMESPACE\020\t\022\023\n\017OP_IN_DATAPOINT\020\n\022\023"
+ "\n\017MULTIPLE_VALUES\020\013\022\031\n\025INVALID_NUMERIC_V"
+ "ALUE\020\014\022\024\n\020INVALID_ENCODING\020\r\022\035\n\031INVALID_"
+ "SPARSE_DIMENSIONS\020\016\022\027\n\023INVALID_TOKEN_VAL"
+ "UE\020\017\022\034\n\030INVALID_SPARSE_EMBEDDING\020\020\022\025\n\021IN"
+ "VALID_EMBEDDING\020\021\022\036\n\032INVALID_EMBEDDING_M"
+ "ETADATA\020\022\022)\n%EMBEDDING_METADATA_EXCEEDS_"
+ "SIZE_LIMIT\020\023\032\237\002\n\026ContentValidationStats\022"
+ "\026\n\016source_gcs_uri\030\001 \001(\t\022\032\n\022valid_record_"
+ "count\030\002 \001(\003\022\034\n\024invalid_record_count\030\003 \001("
+ "\003\022k\n\016partial_errors\030\004 \003(\0132S.google.cloud"
+ ".aiplatform.v1beta1.NearestNeighborSearc"
+ "hOperationMetadata.RecordError\022!\n\031valid_"
+ "sparse_record_count\030\005 \001(\003\022#\n\033invalid_spa"
+ "rse_record_count\030\006 \001(\0032\341\r\n\014IndexService\022"
+ "\331\001\n\013CreateIndex\0223.google.cloud.aiplatfor"
+ "m.v1beta1.CreateIndexRequest\032\035.google.lo"
+ "ngrunning.Operation\"v\312A%\n\005Index\022\034CreateI"
+ "ndexOperationMetadata\332A\014parent,index\202\323\344\223"
+ "\0029\"0/v1beta1/{parent=projects/*/location"
+ "s/*}/indexes:\005index\022\245\001\n\010GetIndex\0220.googl"
+ "e.cloud.aiplatform.v1beta1.GetIndexReque"
+ "st\032&.google.cloud.aiplatform.v1beta1.Ind"
+ "ex\"?\332A\004name\202\323\344\223\0022\0220/v1beta1/{name=projec"
+ "ts/*/locations/*/indexes/*}\022\315\001\n\013ImportIn"
+ "dex\0223.google.cloud.aiplatform.v1beta1.Im"
+ "portIndexRequest\032\035.google.longrunning.Op"
+ "eration\"j\312A%\n\005Index\022\034ImportIndexOperatio"
+ "nMetadata\202\323\344\223\002<\"7/v1beta1/{name=projects"
+ "/*/locations/*/indexes/*}:import:\001*\022\273\001\n\013"
+ "ListIndexes\0223.google.cloud.aiplatform.v1"
+ "beta1.ListIndexesRequest\0324.google.cloud."
+ "aiplatform.v1beta1.ListIndexesResponse\"A"
+ "\332A\006parent\202\323\344\223\0022\0220/v1beta1/{parent=projec"
+ "ts/*/locations/*}/indexes\022\345\001\n\013UpdateInde"
+ "x\0223.google.cloud.aiplatform.v1beta1.Upda"
+ "teIndexRequest\032\035.google.longrunning.Oper"
+ "ation\"\201\001\312A%\n\005Index\022\034UpdateIndexOperation"
+ "Metadata\332A\021index,update_mask\202\323\344\223\002?26/v1b"
+ "eta1/{index.name=projects/*/locations/*/"
+ "indexes/*}:\005index\022\325\001\n\013DeleteIndex\0223.goog"
+ "le.cloud.aiplatform.v1beta1.DeleteIndexR"
+ "equest\032\035.google.longrunning.Operation\"r\312"
+ "A0\n\025google.protobuf.Empty\022\027DeleteOperati"
+ "onMetadata\332A\004name\202\323\344\223\0022*0/v1beta1/{name="
+ "projects/*/locations/*/indexes/*}\022\326\001\n\020Up"
+ "sertDatapoints\0228.google.cloud.aiplatform"
+ ".v1beta1.UpsertDatapointsRequest\0329.googl"
+ "e.cloud.aiplatform.v1beta1.UpsertDatapoi"
+ "ntsResponse\"M\202\323\344\223\002G\"B/v1beta1/{index=pro"
+ "jects/*/locations/*/indexes/*}:upsertDat"
+ "apoints:\001*\022\326\001\n\020RemoveDatapoints\0228.google"
+ ".cloud.aiplatform.v1beta1.RemoveDatapoin"
+ "tsRequest\0329.google.cloud.aiplatform.v1be"
+ "ta1.RemoveDatapointsResponse\"M\202\323\344\223\002G\"B/v"
+ "1beta1/{index=projects/*/locations/*/ind"
+ "exes/*}:removeDatapoints:\001*\032M\312A\031aiplatfo"
+ "rm.googleapis.com\322A.https://www.googleap"
+ "is.com/auth/cloud-platformB\350\001\n#com.googl"
+ "e.cloud.aiplatform.v1beta1B\021IndexService"
+ "ProtoP\001ZCcloud.google.com/go/aiplatform/"
+ "apiv1beta1/aiplatformpb;aiplatformpb\252\002\037G"
+ "oogle.Cloud.AIPlatform.V1Beta1\312\002\037Google\\"
+ "Cloud\\AIPlatform\\V1beta1\352\002\"Google::Cloud"
+ "::AIPlatform::V1beta1b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.IndexProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor(),
com.google.longrunning.OperationsProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
});
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexRequest_descriptor,
new java.lang.String[] {
"Parent", "Index",
});
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_CreateIndexOperationMetadata_descriptor,
new java.lang.String[] {
"GenericMetadata", "NearestNeighborSearchOperationMetadata",
});
internal_static_google_cloud_aiplatform_v1beta1_GetIndexRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_aiplatform_v1beta1_GetIndexRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GetIndexRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesRequest_descriptor,
new java.lang.String[] {
"Parent", "Filter", "PageSize", "PageToken", "ReadMask",
});
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_descriptor,
new java.lang.String[] {
"Indexes", "NextPageToken",
});
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexRequest_descriptor,
new java.lang.String[] {
"Index", "UpdateMask",
});
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_UpdateIndexOperationMetadata_descriptor,
new java.lang.String[] {
"GenericMetadata", "NearestNeighborSearchOperationMetadata",
});
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_descriptor,
new java.lang.String[] {
"Name", "IsCompleteOverwrite", "Config",
});
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_descriptor,
new java.lang.String[] {
"BigQuerySourceConfig", "Source",
});
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_descriptor,
new java.lang.String[] {
"IdColumn", "EmbeddingColumn", "Restricts", "NumericRestricts", "MetadataColumns",
});
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_Restrict_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_Restrict_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_Restrict_descriptor,
new java.lang.String[] {
"Namespace", "AllowColumn", "DenyColumn",
});
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_NumericRestrict_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_NumericRestrict_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_DatapointFieldMapping_NumericRestrict_descriptor,
new java.lang.String[] {
"Namespace", "ValueColumn", "ValueType",
});
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_BigQuerySourceConfig_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_BigQuerySourceConfig_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexRequest_ConnectorConfig_BigQuerySourceConfig_descriptor,
new java.lang.String[] {
"TablePath", "DatapointFieldMapping",
});
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ImportIndexOperationMetadata_descriptor,
new java.lang.String[] {
"GenericMetadata",
});
internal_static_google_cloud_aiplatform_v1beta1_DeleteIndexRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_aiplatform_v1beta1_DeleteIndexRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_DeleteIndexRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsRequest_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsRequest_descriptor,
new java.lang.String[] {
"Index", "Datapoints", "UpdateMask",
});
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsResponse_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_UpsertDatapointsResponse_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsRequest_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsRequest_descriptor,
new java.lang.String[] {
"Index", "DatapointIds",
});
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsResponse_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RemoveDatapointsResponse_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_descriptor,
new java.lang.String[] {
"ContentValidationStats", "DataBytesCount",
});
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_RecordError_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_RecordError_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_RecordError_descriptor,
new java.lang.String[] {
"ErrorType", "ErrorMessage", "SourceGcsUri", "EmbeddingId", "RawRecord",
});
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_ContentValidationStats_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_ContentValidationStats_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborSearchOperationMetadata_ContentValidationStats_descriptor,
new java.lang.String[] {
"SourceGcsUri",
"ValidRecordCount",
"InvalidRecordCount",
"PartialErrors",
"ValidSparseRecordCount",
"InvalidSparseRecordCount",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resourceReference);
registry.add(com.google.longrunning.OperationsProto.operationInfo);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.IndexProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor();
com.google.longrunning.OperationsProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
// ---- file boundary: java-automl/proto-google-cloud-automl-v1beta1/src/main/java/com/google/cloud/automl/v1beta1/ListDatasetsResponse.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/automl/v1beta1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.automl.v1beta1;
/**
*
*
* <pre>
* Response message for [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.ListDatasetsResponse}
*/
public final class ListDatasetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.automl.v1beta1.ListDatasetsResponse)
ListDatasetsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListDatasetsResponse.newBuilder() to construct.
  private ListDatasetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance: empty dataset list, empty page token.
  private ListDatasetsResponse() {
    datasets_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Called reflectively by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDatasetsResponse();
  }
  // Message descriptor, taken from the generated AutoMlProto descriptor holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.automl.v1beta1.AutoMlProto
        .internal_static_google_cloud_automl_v1beta1_ListDatasetsResponse_descriptor;
  }
  // Field-accessor table binding this message class and its Builder to the descriptor.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.automl.v1beta1.AutoMlProto
        .internal_static_google_cloud_automl_v1beta1_ListDatasetsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.automl.v1beta1.ListDatasetsResponse.class,
            com.google.cloud.automl.v1beta1.ListDatasetsResponse.Builder.class);
  }
  public static final int DATASETS_FIELD_NUMBER = 1;
  // Backing list for repeated field 1; immutable once the message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.automl.v1beta1.Dataset> datasets_;
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.automl.v1beta1.Dataset> getDatasetsList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.automl.v1beta1.DatasetOrBuilder>
      getDatasetsOrBuilderList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public int getDatasetsCount() {
    return datasets_.size();
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.automl.v1beta1.Dataset getDatasets(int index) {
    return datasets_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.automl.v1beta1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
    return datasets_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; volatile so the lazily decoded String
  // cached below is safely published across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   * Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1beta1.ListDatasetsRequest.page_token] to obtain that page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   * Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1beta1.ListDatasetsRequest.page_token] to obtain that page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the ByteString form for subsequent byte-oriented reads.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so initialization always succeeds.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes field 1 (repeated datasets), field 2 (next_page_token), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < datasets_.size(); i++) {
      output.writeMessage(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size; mirrors writeTo field-for-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < datasets_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over both fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.automl.v1beta1.ListDatasetsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.automl.v1beta1.ListDatasetsResponse other =
        (com.google.cloud.automl.v1beta1.ListDatasetsResponse) obj;
    if (!getDatasetsList().equals(other.getDatasetsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(); memoized after first computation.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field only contributes when non-empty, keeping empty lists neutral.
    if (getDatasetsCount() > 0) {
      hash = (37 * hash) + DATASETS_FIELD_NUMBER;
      hash = (53 * hash) + getDatasetsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads, all delegating to PARSER
  // (declared elsewhere in this class) or to the GeneratedMessageV3 IO helpers.
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.automl.v1beta1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder derived from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(com.google.cloud.automl.v1beta1.ListDatasetsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.ListDatasetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.automl.v1beta1.ListDatasetsResponse)
com.google.cloud.automl.v1beta1.ListDatasetsResponseOrBuilder {
    // Message descriptor for the Builder, same source as the outer class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.automl.v1beta1.AutoMlProto
          .internal_static_google_cloud_automl_v1beta1_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.automl.v1beta1.AutoMlProto
          .internal_static_google_cloud_automl_v1beta1_ListDatasetsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.automl.v1beta1.ListDatasetsResponse.class,
              com.google.cloud.automl.v1beta1.ListDatasetsResponse.Builder.class);
    }
    // Construct using com.google.cloud.automl.v1beta1.ListDatasetsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both fields and the has-bits back to the empty state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (datasetsBuilder_ == null) {
        datasets_ = java.util.Collections.emptyList();
      } else {
        datasets_ = null;
        datasetsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.automl.v1beta1.AutoMlProto
          .internal_static_google_cloud_automl_v1beta1_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.automl.v1beta1.ListDatasetsResponse getDefaultInstanceForType() {
      return com.google.cloud.automl.v1beta1.ListDatasetsResponse.getDefaultInstance();
    }
    // Builds and verifies initialization (always true here — no required fields).
    @java.lang.Override
    public com.google.cloud.automl.v1beta1.ListDatasetsResponse build() {
      com.google.cloud.automl.v1beta1.ListDatasetsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.automl.v1beta1.ListDatasetsResponse buildPartial() {
      com.google.cloud.automl.v1beta1.ListDatasetsResponse result =
          new com.google.cloud.automl.v1beta1.ListDatasetsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated datasets field, freezing the list if it was built inline
    // (bit 0 tracks whether this builder owns a mutable copy).
    private void buildPartialRepeatedFields(
        com.google.cloud.automl.v1beta1.ListDatasetsResponse result) {
      if (datasetsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          datasets_ = java.util.Collections.unmodifiableList(datasets_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.datasets_ = datasets_;
      } else {
        result.datasets_ = datasetsBuilder_.build();
      }
    }
    // Transfers singular fields guarded by their has-bits (bit 1 = next_page_token).
    private void buildPartial0(com.google.cloud.automl.v1beta1.ListDatasetsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // The overrides below simply delegate to GeneratedMessageV3.Builder; they are
    // emitted by the generator so the covariant Builder return type is preserved.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed mergeFrom when possible, else falls back to reflection.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.automl.v1beta1.ListDatasetsResponse) {
        return mergeFrom((com.google.cloud.automl.v1beta1.ListDatasetsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges another ListDatasetsResponse: concatenates datasets, overwrites a
    // non-empty next_page_token, and carries over unknown fields.
    public Builder mergeFrom(com.google.cloud.automl.v1beta1.ListDatasetsResponse other) {
      if (other == com.google.cloud.automl.v1beta1.ListDatasetsResponse.getDefaultInstance())
        return this;
      if (datasetsBuilder_ == null) {
        if (!other.datasets_.isEmpty()) {
          if (datasets_.isEmpty()) {
            // Share the other message's immutable list rather than copying.
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDatasetsIsMutable();
            datasets_.addAll(other.datasets_);
          }
          onChanged();
        }
      } else {
        if (!other.datasets_.isEmpty()) {
          if (datasetsBuilder_.isEmpty()) {
            // Drop the empty field builder and adopt the other list directly.
            datasetsBuilder_.dispose();
            datasetsBuilder_ = null;
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
            datasetsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDatasetsFieldBuilder()
                    : null;
          } else {
            datasetsBuilder_.addAllMessages(other.datasets_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses wire-format input into this builder. Tag 10 = field 1 (Dataset message),
    // tag 18 = field 2 (next_page_token string); anything else goes to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.automl.v1beta1.Dataset m =
                    input.readMessage(
                        com.google.cloud.automl.v1beta1.Dataset.parser(), extensionRegistry);
                if (datasetsBuilder_ == null) {
                  ensureDatasetsIsMutable();
                  datasets_.add(m);
                } else {
                  datasetsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits: bit 0 = datasets list is a mutable builder-owned copy, bit 1 = next_page_token set.
    private int bitField0_;
    private java.util.List<com.google.cloud.automl.v1beta1.Dataset> datasets_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared/immutable list with an ArrayList the builder owns.
    private void ensureDatasetsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        datasets_ = new java.util.ArrayList<com.google.cloud.automl.v1beta1.Dataset>(datasets_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created field builder; when non-null it owns the repeated field state.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.automl.v1beta1.Dataset,
            com.google.cloud.automl.v1beta1.Dataset.Builder,
            com.google.cloud.automl.v1beta1.DatasetOrBuilder>
        datasetsBuilder_;
    // Read accessors: each branches on whether the field builder owns the state.
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public java.util.List<com.google.cloud.automl.v1beta1.Dataset> getDatasetsList() {
      if (datasetsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(datasets_);
      } else {
        return datasetsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public int getDatasetsCount() {
      if (datasetsBuilder_ == null) {
        return datasets_.size();
      } else {
        return datasetsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public com.google.cloud.automl.v1beta1.Dataset getDatasets(int index) {
      if (datasetsBuilder_ == null) {
        return datasets_.get(index);
      } else {
        return datasetsBuilder_.getMessage(index);
      }
    }
    // Mutators: write into the inline list (after ensureDatasetsIsMutable) or into
    // the field builder, then notify parent builders via onChanged().
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder setDatasets(int index, com.google.cloud.automl.v1beta1.Dataset value) {
      if (datasetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatasetsIsMutable();
        datasets_.set(index, value);
        onChanged();
      } else {
        datasetsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder setDatasets(
        int index, com.google.cloud.automl.v1beta1.Dataset.Builder builderForValue) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.set(index, builderForValue.build());
        onChanged();
      } else {
        datasetsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(com.google.cloud.automl.v1beta1.Dataset value) {
      if (datasetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatasetsIsMutable();
        datasets_.add(value);
        onChanged();
      } else {
        datasetsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(int index, com.google.cloud.automl.v1beta1.Dataset value) {
      if (datasetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatasetsIsMutable();
        datasets_.add(index, value);
        onChanged();
      } else {
        datasetsBuilder_.addMessage(index, value);
      }
      return this;
    }
    // Builder-valued add overloads plus bulk addAll; same dual-path dispatch as above.
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(com.google.cloud.automl.v1beta1.Dataset.Builder builderForValue) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.add(builderForValue.build());
        onChanged();
      } else {
        datasetsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(
        int index, com.google.cloud.automl.v1beta1.Dataset.Builder builderForValue) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.add(index, builderForValue.build());
        onChanged();
      } else {
        datasetsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder addAllDatasets(
        java.lang.Iterable<? extends com.google.cloud.automl.v1beta1.Dataset> values) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, datasets_);
        onChanged();
      } else {
        datasetsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder clearDatasets() {
      if (datasetsBuilder_ == null) {
        // Reset to the shared empty list and clear the mutable-copy bit.
        datasets_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        datasetsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The datasets read.
     * </pre>
     *
     * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
     */
    public Builder removeDatasets(int index) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.remove(index);
        onChanged();
      } else {
        datasetsBuilder_.remove(index);
      }
      return this;
    }
/**
 * Returns a mutable builder view of the element at {@code index}; forces the
 * repeated field into builder mode.
 *
 * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
 */
public com.google.cloud.automl.v1beta1.Dataset.Builder getDatasetsBuilder(int index) {
  com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.automl.v1beta1.Dataset,
          com.google.cloud.automl.v1beta1.Dataset.Builder,
          com.google.cloud.automl.v1beta1.DatasetOrBuilder>
      fieldBuilder = getDatasetsFieldBuilder();
  return fieldBuilder.getBuilder(index);
}
/**
 * Returns a read-only view of the element at {@code index}, whether the field is
 * currently stored as a plain list or inside the field builder.
 *
 * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
 */
public com.google.cloud.automl.v1beta1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
  if (datasetsBuilder_ != null) {
    return datasetsBuilder_.getMessageOrBuilder(index);
  }
  return datasets_.get(index);
}
/**
 * Returns a read-only list view of all elements of the repeated {@code datasets}
 * field.
 *
 * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
 */
public java.util.List<? extends com.google.cloud.automl.v1beta1.DatasetOrBuilder>
    getDatasetsOrBuilderList() {
  if (datasetsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(datasets_);
  }
  return datasetsBuilder_.getMessageOrBuilderList();
}
/**
 * Appends a new, default-initialized element to the repeated {@code datasets}
 * field and returns its builder.
 *
 * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
 */
public com.google.cloud.automl.v1beta1.Dataset.Builder addDatasetsBuilder() {
  com.google.cloud.automl.v1beta1.Dataset defaultInstance =
      com.google.cloud.automl.v1beta1.Dataset.getDefaultInstance();
  return getDatasetsFieldBuilder().addBuilder(defaultInstance);
}
/**
 * Inserts a new, default-initialized element at {@code index} in the repeated
 * {@code datasets} field and returns its builder.
 *
 * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
 */
public com.google.cloud.automl.v1beta1.Dataset.Builder addDatasetsBuilder(int index) {
  com.google.cloud.automl.v1beta1.Dataset defaultInstance =
      com.google.cloud.automl.v1beta1.Dataset.getDefaultInstance();
  return getDatasetsFieldBuilder().addBuilder(index, defaultInstance);
}
/**
 * Returns builders for every element of the repeated {@code datasets} field;
 * forces the field into builder mode.
 *
 * <code>repeated .google.cloud.automl.v1beta1.Dataset datasets = 1;</code>
 */
public java.util.List<com.google.cloud.automl.v1beta1.Dataset.Builder>
    getDatasetsBuilderList() {
  com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.automl.v1beta1.Dataset,
          com.google.cloud.automl.v1beta1.Dataset.Builder,
          com.google.cloud.automl.v1beta1.DatasetOrBuilder>
      fieldBuilder = getDatasetsFieldBuilder();
  return fieldBuilder.getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for "datasets". Once created, the
// builder owns the field's contents and datasets_ is nulled out — callers must
// go through this accessor from then on.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.automl.v1beta1.Dataset,
        com.google.cloud.automl.v1beta1.Dataset.Builder,
        com.google.cloud.automl.v1beta1.DatasetOrBuilder>
    getDatasetsFieldBuilder() {
  if (datasetsBuilder_ == null) {
    datasetsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.automl.v1beta1.Dataset,
            com.google.cloud.automl.v1beta1.Dataset.Builder,
            com.google.cloud.automl.v1beta1.DatasetOrBuilder>(
            datasets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    datasets_ = null; // ownership transferred to the builder
  }
  return datasetsBuilder_;
}
// Stored as either a String or a ByteString; decoded lazily on first String access.
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token to retrieve next page of results.
 * Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1beta1.ListDatasetsRequest.page_token] to obtain that page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    // First String access: decode the ByteString once and cache the result.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * A token to retrieve next page of results.
 * Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1beta1.ListDatasetsRequest.page_token] to obtain that page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // First bytes access: encode the String once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * Sets the {@code next_page_token} field.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  nextPageToken_ = java.util.Objects.requireNonNull(value);
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Resets {@code next_page_token} to its default value and clears its has-bit.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  bitField0_ = (bitField0_ & ~0x00000002);
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  onChanged();
  return this;
}
/**
 * Sets {@code next_page_token} from raw bytes, which must be valid UTF-8.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Plain delegation to the base builder; kept final so generated subclasses
// cannot change unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Plain delegation to the base builder; merges rather than replaces.
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.automl.v1beta1.ListDatasetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.ListDatasetsResponse)
// Singleton default instance, created eagerly at class-load time.
private static final com.google.cloud.automl.v1beta1.ListDatasetsResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.automl.v1beta1.ListDatasetsResponse();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.automl.v1beta1.ListDatasetsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any failure the partially-built message is attached to
// the thrown InvalidProtocolBufferException so callers can inspect what parsed.
private static final com.google.protobuf.Parser<ListDatasetsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListDatasetsResponse>() {
      @java.lang.Override
      public ListDatasetsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the static wire-format parser for this message type. */
public static com.google.protobuf.Parser<ListDatasetsResponse> parser() {
  return PARSER;
}
// Instance accessor required by the Message interface; same static PARSER.
@java.lang.Override
public com.google.protobuf.Parser<ListDatasetsResponse> getParserForType() {
  return PARSER;
}
// Instance accessor required by the Message interface; same static singleton.
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListDatasetsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== Begin: openjdk/jdk8 — jdk/src/share/classes/sun/net/www/http/HttpClient.java ====
* Copyright (c) 1994, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.net.www.http;
import java.io.*;
import java.net.*;
import java.util.Locale;
import sun.net.NetworkClient;
import sun.net.ProgressSource;
import sun.net.www.MessageHeader;
import sun.net.www.HeaderParser;
import sun.net.www.MeteredStream;
import sun.net.www.ParseUtil;
import sun.net.www.protocol.http.HttpURLConnection;
import sun.util.logging.PlatformLogger;
import static sun.net.www.protocol.http.HttpURLConnection.TunnelState.*;
/**
* @author Herb Jellinek
* @author Dave Brown
*/
public class HttpClient extends NetworkClient {
// whether this httpclient comes from the cache
protected boolean cachedHttpClient = false;
// true while this client sits in the KeepAliveCache (guarded by synchronized methods)
protected boolean inCache;
// Http requests we send
MessageHeader requests;
// Http data we send with the headers
PosterOutputStream poster = null;
// true if we are in streaming mode (fixed length or chunked)
boolean streaming;
// if we've had one io error; when true, parseHTTP will not retry again
boolean failedOnce = false;
/** When true (the default), 100-Continue responses are skipped transparently. */
private boolean ignoreContinue = true;
private static final int HTTP_CONTINUE = 100;
/** Default port number for http daemons. REMIND: make these private */
static final int httpPortNumber = 80;
/** return default port number (subclasses may override, e.g. HttpsClient returns 443) */
protected int getDefaultPort () { return httpPortNumber; }
/** Default port for the given scheme (case-insensitive): 80 for http, 443 for https, -1 otherwise. */
static private int getDefaultPort(String proto) {
    return "http".equalsIgnoreCase(proto) ? 80
         : "https".equalsIgnoreCase(proto) ? 443
         : -1;
}
/* All proxying (generic as well as instance-specific) may be
 * disabled through use of this flag
 */
protected boolean proxyDisabled;
// are we using proxy in this instance?
public boolean usingProxy = false;
// target host, port for the URL
protected String host;
protected int port;
/* where we cache currently open, persistent connections */
protected static KeepAliveCache kac = new KeepAliveCache();
// value of the http.keepAlive system property; defaults to true when unset
private static boolean keepAliveProp = true;
// retryPostProp is true by default so as to preserve behavior
// from previous releases.
private static boolean retryPostProp = true;
volatile boolean keepingAlive = false; /* this is a keep-alive connection */
int keepAliveConnections = -1; /* number of keep-alives left */
/**Idle timeout value, in milliseconds. Zero means infinity,
 * iff keepingAlive=true.
 * Unfortunately, we can't always believe this one. If I'm connected
 * through a Netscape proxy to a server that sent me a keep-alive
 * time of 15 sec, the proxy unilaterally terminates my connection
 * after 5 sec. So we have to hard code our effective timeout to
 * 4 sec for the case where we're using a proxy. *SIGH*
 */
int keepAliveTimeout = 0;
/** whether the response is to be cached */
private CacheRequest cacheRequest = null;
/** Url being fetched. */
protected URL url;
/* if set, the client will be reused and must not be put in cache */
public boolean reuse = false;
// Traffic capture tool, if configured. See HttpCapture class for info
private HttpCapture capture = null;
private static final PlatformLogger logger = HttpURLConnection.getHttpLogger();
// Logs msg at FINEST level; level check avoids message construction cost upstream.
private static void logFinest(String msg) {
    if (logger.isLoggable(PlatformLogger.Level.FINEST)) {
        logger.finest(msg);
    }
}
/**
 * A NOP method kept for backwards binary compatibility
 * @deprecated -- system properties are no longer cached.
 */
@Deprecated
public static synchronized void resetProperties() {
    // Intentionally empty: properties are read once in the static initializer.
}
/** Returns the server-advertised keep-alive idle timeout in seconds (0 = unknown/infinite). */
int getKeepAliveTimeout() {
    return keepAliveTimeout;
}
static {
    // Read networking tunables once at class-load time (privileged: callers
    // may not have permission to read system properties directly).
    String keepAlive = java.security.AccessController.doPrivileged(
        new sun.security.action.GetPropertyAction("http.keepAlive"));
    String retryPost = java.security.AccessController.doPrivileged(
        new sun.security.action.GetPropertyAction("sun.net.http.retryPost"));
    // Both flags default to true when the property is unset.
    keepAliveProp = (keepAlive == null) || Boolean.parseBoolean(keepAlive);
    retryPostProp = (retryPost == null) || Boolean.parseBoolean(retryPost);
}
/**
 * @return true iff http keep alive is set (i.e. enabled). Defaults
 * to true if the system property http.keepAlive isn't set.
 */
public boolean getHttpKeepAliveSet() {
    return keepAliveProp;
}
/** No-arg constructor for subclasses that perform their own setup. */
protected HttpClient() {
}
/** Connects directly (default proxy selection, no explicit proxy host/port). */
private HttpClient(URL url)
throws IOException {
    this(url, (String)null, -1, false);
}
/** Connects to url; proxyDisabled=true forces a direct connection. */
protected HttpClient(URL url,
                     boolean proxyDisabled) throws IOException {
    this(url, null, -1, proxyDisabled);
}
/* This package-only CTOR should only be used for FTP piggy-backed on HTTP
 * HTTP URL's that use this won't take advantage of keep-alive.
 * Additionally, this constructor may be used as a last resort when the
 * first HttpClient gotten through New() failed (probably b/c of a
 * Keep-Alive mismatch).
 *
 * XXX That documentation is wrong ... it's not package-private any more
 */
public HttpClient(URL url, String proxyHost, int proxyPort)
throws IOException {
    this(url, proxyHost, proxyPort, false);
}
/**
 * Core constructor: records target host/port (falling back to the protocol
 * default port), sets the connect timeout, then opens the connection.
 * NOTE(review): calls the overridable openServer() from a constructor —
 * subclasses see a partially-constructed instance; confirm intended.
 */
protected HttpClient(URL url, Proxy p, int to) throws IOException {
    proxy = (p == null) ? Proxy.NO_PROXY : p;
    this.host = url.getHost();
    this.url = url;
    port = url.getPort();
    if (port == -1) {
        port = getDefaultPort();
    }
    setConnectTimeout(to);
    // Optional traffic-capture hook (see HttpCapture); null when not configured.
    capture = HttpCapture.getCapture(url);
    openServer();
}
/**
 * Builds an HTTP Proxy for the given host/port; a negative port falls back to
 * the scheme's default. Returns NO_PROXY when host or scheme is null. The
 * address is left unresolved so DNS happens at connect time.
 */
static protected Proxy newHttpProxy(String proxyHost, int proxyPort,
                                    String proto) {
    if (proxyHost == null || proto == null) {
        return Proxy.NO_PROXY;
    }
    final int effectivePort = (proxyPort < 0) ? getDefaultPort(proto) : proxyPort;
    return new Proxy(Proxy.Type.HTTP,
                     InetSocketAddress.createUnresolved(proxyHost, effectivePort));
}
/*
 * This constructor gives "ultimate" flexibility, including the ability
 * to bypass implicit proxying. Sometimes we need to be using tunneling
 * (transport or network level) instead of proxying (application level),
 * for example when we don't want the application level data to become
 * visible to third parties.
 *
 * @param url the URL to which we're connecting
 * @param proxyHost proxy to use for this URL (e.g. forwarding)
 * @param proxyPort proxy port to use for this URL
 * @param proxyDisabled true to disable default proxying
 */
private HttpClient(URL url, String proxyHost, int proxyPort,
                   boolean proxyDisabled)
    throws IOException {
    this(url, proxyDisabled ? Proxy.NO_PROXY :
        newHttpProxy(proxyHost, proxyPort, "http"), -1);
}
/** Like the private (url, proxyHost, proxyPort, proxyDisabled) ctor, with an explicit connect timeout. */
public HttpClient(URL url, String proxyHost, int proxyPort,
                  boolean proxyDisabled, int to)
    throws IOException {
    this(url, proxyDisabled ? Proxy.NO_PROXY :
        newHttpProxy(proxyHost, proxyPort, "http"), to);
}
/* This class has no public constructor for HTTP. This method is used to
 * get an HttpClient to the specified URL. If there's currently an
 * active HttpClient to that server/port, you'll get that one.
 */
public static HttpClient New(URL url)
throws IOException {
    // No proxy, default timeout, cache enabled, no associated connection.
    return HttpClient.New(url, Proxy.NO_PROXY, -1, true, null);
}
/** Factory with explicit keep-alive-cache opt-out; no proxy, default timeout. */
public static HttpClient New(URL url, boolean useCache)
    throws IOException {
    return HttpClient.New(url, Proxy.NO_PROXY, -1, useCache, null);
}
/*
 * Primary cache-aware factory. Returns a cached keep-alive connection when a
 * usable one exists (verifying liveness first for streaming POSTs), otherwise
 * opens a new connection. A cached client whose proxy differs from the
 * requested one is closed rather than reused. httpuc may be null.
 */
public static HttpClient New(URL url, Proxy p, int to, boolean useCache,
    HttpURLConnection httpuc) throws IOException
{
    if (p == null) {
        p = Proxy.NO_PROXY;
    }
    HttpClient ret = null;
    /* see if one's already around */
    if (useCache) {
        ret = kac.get(url, null);
        // FIX: compare the request method with equals(), not reference
        // identity (==), which only worked when both strings were interned.
        if (ret != null && httpuc != null &&
            httpuc.streaming() &&
            "POST".equals(httpuc.getRequestMethod())) {
            if (!ret.available()) {
                // Cached connection is dead; drop it and open a fresh one.
                ret.inCache = false;
                ret.closeServer();
                ret = null;
            }
        }
        if (ret != null) {
            if ((ret.proxy != null && ret.proxy.equals(p)) ||
                (ret.proxy == null && p == null)) {
                synchronized (ret) {
                    ret.cachedHttpClient = true;
                    assert ret.inCache;
                    ret.inCache = false;
                    if (httpuc != null && ret.needsTunneling())
                        httpuc.setTunnelState(TUNNELING);
                    logFinest("KeepAlive stream retrieved from the cache, " + ret);
                }
            } else {
                // We cannot return this connection to the cache as it's
                // KeepAliveTimeout will get reset. We simply close the connection.
                // This should be fine as it is very rare that a connection
                // to the same host will not use the same proxy.
                synchronized(ret) {
                    ret.inCache = false;
                    ret.closeServer();
                }
                ret = null;
            }
        }
    }
    if (ret == null) {
        ret = new HttpClient(url, p, to);
    } else {
        // Reusing a cached connection: re-run the connect permission check
        // the constructor would otherwise have performed.
        SecurityManager security = System.getSecurityManager();
        if (security != null) {
            if (ret.proxy == Proxy.NO_PROXY || ret.proxy == null) {
                security.checkConnect(InetAddress.getByName(url.getHost()).getHostAddress(), url.getPort());
            } else {
                security.checkConnect(url.getHost(), url.getPort());
            }
        }
        ret.url = url;
    }
    return ret;
}
/** Cache-enabled convenience overload of the primary factory. */
public static HttpClient New(URL url, Proxy p, int to,
    HttpURLConnection httpuc) throws IOException
{
    return New(url, p, to, true, httpuc);
}
/** Host/port-proxy overload; builds an http Proxy and delegates (default timeout). */
public static HttpClient New(URL url, String proxyHost, int proxyPort,
                             boolean useCache)
    throws IOException {
    return New(url, newHttpProxy(proxyHost, proxyPort, "http"),
        -1, useCache, null);
}
/** Host/port-proxy overload with explicit timeout and associated connection. */
public static HttpClient New(URL url, String proxyHost, int proxyPort,
                             boolean useCache, int to,
                             HttpURLConnection httpuc)
    throws IOException {
    return New(url, newHttpProxy(proxyHost, proxyPort, "http"),
        to, useCache, httpuc);
}
/* return it to the cache as still usable, if:
 * 1) It's keeping alive, AND
 * 2) It still has some connections left, AND
 * 3) It hasn't had a error (PrintStream.checkError())
 * 4) It hasn't timed out
 *
 * If this client is not keepingAlive, it should have been
 * removed from the cache in the parseHeaders() method.
 */
public void finished() {
    if (reuse) /* will be reused */
        return;
    keepAliveConnections--;
    poster = null;
    if (keepAliveConnections > 0 && isKeepingAlive() &&
        !(serverOutput.checkError())) {
        /* This connection is keepingAlive && still valid.
         * Return it to the cache.
         */
        putInKeepAliveCache();
    } else {
        closeServer();
    }
}
/**
 * Probes whether a cached connection is still usable: temporarily sets a 1 ms
 * SO_TIMEOUT and attempts one read. A timeout means no data pending (good);
 * read() == -1 or any other IOException means the peer closed the socket.
 * The original SO_TIMEOUT is restored in the finally block.
 * NOTE(review): the byte consumed on a successful read is discarded —
 * presumably any pending data here would be stale; confirm.
 */
protected synchronized boolean available() {
    boolean available = true;
    int old = -1;
    try {
        try {
            old = serverSocket.getSoTimeout();
            serverSocket.setSoTimeout(1);
            BufferedInputStream tmpbuf =
                new BufferedInputStream(serverSocket.getInputStream());
            int r = tmpbuf.read();
            if (r == -1) {
                logFinest("HttpClient.available(): " +
                    "read returned -1: not available");
                available = false;
            }
        } catch (SocketTimeoutException e) {
            logFinest("HttpClient.available(): " +
                "SocketTimeout: its available");
        } finally {
            if (old != -1)
                serverSocket.setSoTimeout(old);
        }
    } catch (IOException e) {
        logFinest("HttpClient.available(): " +
            "SocketException: not available");
        available = false;
    }
    return available;
}
/** Places this client in the keep-alive cache; asserts against double insertion. */
protected synchronized void putInKeepAliveCache() {
    if (inCache) {
        assert false : "Duplicate put to keep alive cache";
        return;
    }
    inCache = true;
    kac.put(url, null, this);
}
/** @return true while this client sits in the keep-alive cache. */
protected synchronized boolean isInKeepAliveCache() {
    return inCache;
}
/*
 * Close an idle connection to this URL (if it exists in the
 * cache). kac.get removes the entry, so the closed client is
 * not left behind in the cache.
 */
public void closeIdleConnection() {
    HttpClient http = kac.get(url, null);
    if (http != null) {
        http.closeServer();
    }
}
/* We're very particular here about what our InputStream to the server
 * looks like for reasons that are apparent if you can decipher the
 * method parseHTTP(). That's why this method is overidden from the
 * superclass.
 */
@Override
public void openServer(String server, int port) throws IOException {
    serverSocket = doConnect(server, port);
    try {
        OutputStream out = serverSocket.getOutputStream();
        if (capture != null) {
            // Wrap output for the traffic-capture tool when configured.
            out = new HttpCaptureOutputStream(out, capture);
        }
        serverOutput = new PrintStream(
            new BufferedOutputStream(out),
            false, encoding);
    } catch (UnsupportedEncodingException e) {
        throw new InternalError(encoding+" encoding not found", e);
    }
    // Request headers are small and latency-sensitive; disable Nagle.
    serverSocket.setTcpNoDelay(true);
}
/*
 * Returns true if the http request should be tunneled through proxy.
 * An example where this is the case is Https. Overridden by HttpsClient.
 */
public boolean needsTunneling() {
    return false;
}
/*
 * Returns true if this httpclient was retrieved from the keep-alive cache.
 */
public synchronized boolean isCachedConnection() {
    return cachedHttpClient;
}
/*
 * Finish any work left after the socket connection is
 * established. In the normal http case, it's a NO-OP. Subclass
 * may need to override this. An example is Https, where for
 * direct connection to the origin server, ssl handshake needs to
 * be done; for proxy tunneling, the socket needs to be converted
 * into an SSL socket before ssl handshake can take place.
 */
public void afterConnect() throws IOException, UnknownHostException {
    // NO-OP. Needs to be overwritten by HttpsClient
}
/*
 * call openServer in a privileged block, so that connecting through a
 * system-configured proxy does not require the caller's own connect
 * permission. The wrapped IOException is unwrapped and rethrown.
 */
private synchronized void privilegedOpenServer(final InetSocketAddress server)
    throws IOException
{
    try {
        java.security.AccessController.doPrivileged(
            new java.security.PrivilegedExceptionAction<Void>() {
                public Void run() throws IOException {
                    openServer(server.getHostString(), server.getPort());
                    return null;
                }
            });
    } catch (java.security.PrivilegedActionException pae) {
        throw (IOException) pae.getException();
    }
}
/*
 * call super.openServer — a named trampoline so the plain NetworkClient
 * connect path can be invoked explicitly (bypassing this class's override).
 */
private void superOpenServer(final String proxyHost,
                             final int proxyPort)
    throws IOException, UnknownHostException
{
    super.openServer(proxyHost, proxyPort);
}
/*
 * Opens the connection to the target, going through an HTTP proxy when one
 * is configured. The proxied path is identical for every protocol; for a
 * direct connection, http/https uses this class's overridable
 * openServer(String,int) (so HttpsClient can hook connection setup) while
 * other protocols (most likely ftp piggy-backed on http) go straight to
 * NetworkClient's implementation. Behavior is unchanged from the original;
 * the duplicated proxy branch has been folded into one.
 */
protected synchronized void openServer() throws IOException {
    SecurityManager security = System.getSecurityManager();
    if (security != null) {
        security.checkConnect(host, port);
    }
    if (keepingAlive) { // already opened
        return;
    }
    if ((proxy != null) && (proxy.type() == Proxy.Type.HTTP)) {
        // Proxied connection — same for http, https and anything else.
        sun.net.www.URLConnection.setProxiedHost(host);
        privilegedOpenServer((InetSocketAddress) proxy.address());
        usingProxy = true;
    } else if (url.getProtocol().equals("http") ||
               url.getProtocol().equals("https")) {
        // Direct http/https connection (overridable openServer).
        openServer(host, port);
        usingProxy = false;
    } else {
        /* we're opening some other kind of url, most likely an ftp url:
         * make a direct connection via the superclass. */
        super.openServer(host, port);
        usingProxy = false;
    }
}
/**
 * Returns the Request-URI to place on the request line: an absolute URI
 * (without fragment) when going through a proxy, otherwise the URL's file
 * part, normalized so the path is never empty. Throws MalformedURLException
 * if the result contains a newline (request-splitting guard).
 */
public String getURLFile() throws IOException {
    String fileName;
    /**
     * proxyDisabled is set by subclass HttpsClient!
     */
    if (usingProxy && !proxyDisabled) {
        // Do not use URLStreamHandler.toExternalForm as the fragment
        // should not be part of the RequestURI. It should be an
        // absolute URI which does not have a fragment part.
        // StringBuilder instead of StringBuffer: no synchronization needed
        // for this method-local buffer.
        StringBuilder result = new StringBuilder(128);
        result.append(url.getProtocol());
        result.append(':');
        if (url.getAuthority() != null && url.getAuthority().length() > 0) {
            result.append("//");
            result.append(url.getAuthority());
        }
        if (url.getPath() != null) {
            result.append(url.getPath());
        }
        if (url.getQuery() != null) {
            result.append('?');
            result.append(url.getQuery());
        }
        fileName = result.toString();
    } else {
        fileName = url.getFile();
        if ((fileName == null) || (fileName.length() == 0)) {
            fileName = "/";
        } else if (fileName.charAt(0) == '?') {
            /* HTTP/1.1 spec says in 5.1.2. about Request-URI:
             * "Note that the absolute path cannot be empty; if
             * none is present in the original URI, it MUST be
             * given as "/" (the server root)." So if the file
             * name here has only a query string, the path is
             * empty and we also have to add a "/".
             */
            fileName = "/" + fileName;
        }
    }
    if (fileName.indexOf('\n') == -1)
        return fileName;
    else
        throw new java.net.MalformedURLException("Illegal character in URL");
}
/**
 * Writes the request headers and flushes; no request body.
 * @deprecated use {@link #writeRequests(MessageHeader, PosterOutputStream)}
 */
@Deprecated
public void writeRequests(MessageHeader head) {
    requests = head;
    requests.print(serverOutput);
    serverOutput.flush();
}
/** Writes request headers, then the buffered request body (if any), then flushes. */
public void writeRequests(MessageHeader head,
                          PosterOutputStream pos) throws IOException {
    requests = head;
    requests.print(serverOutput);
    poster = pos;
    if (poster != null)
        poster.writeTo(serverOutput);
    serverOutput.flush();
}
/** As above, additionally recording whether the request body was streamed (affects POST retry). */
public void writeRequests(MessageHeader head,
                          PosterOutputStream pos,
                          boolean streaming) throws IOException {
    this.streaming = streaming;
    writeRequests(head, pos);
}
/** Parse the first line of the HTTP request. It usually looks
 something like: "HTTP/1.0 <number> comment\r\n".
 On I/O failure, retries the request exactly once (tracked by failedOnce),
 except for CONNECT requests and POSTs that are streaming or have retry
 disabled via sun.net.http.retryPost. */
public boolean parseHTTP(MessageHeader responses, ProgressSource pi, HttpURLConnection httpuc)
throws IOException {
    /* If "HTTP/*" is found in the beginning, return true. Let
     * HttpURLConnection parse the mime header itself.
     *
     * If this isn't valid HTTP, then we don't try to parse a header
     * out of the beginning of the response into the responses,
     * and instead just queue up the output stream to it's very beginning.
     * This seems most reasonable, and is what the NN browser does.
     */
    try {
        serverInput = serverSocket.getInputStream();
        if (capture != null) {
            serverInput = new HttpCaptureInputStream(serverInput, capture);
        }
        serverInput = new BufferedInputStream(serverInput);
        return (parseHTTPHeader(responses, pi, httpuc));
    } catch (SocketTimeoutException stex) {
        // We don't want to retry the request when the app. sets a timeout
        // but don't close the server if timeout while waiting for 100-continue
        if (ignoreContinue) {
            closeServer();
        }
        throw stex;
    } catch (IOException e) {
        closeServer();
        cachedHttpClient = false;
        if (!failedOnce && requests != null) {
            failedOnce = true;
            if (getRequestMethod().equals("CONNECT") ||
                (httpuc.getRequestMethod().equals("POST") &&
                 (!retryPostProp || streaming))) {
                // do not retry the request
            } else {
                // try once more: reopen, re-tunnel if necessary, resend, reparse
                openServer();
                if (needsTunneling()) {
                    httpuc.doTunneling();
                }
                afterConnect();
                writeRequests(requests, poster);
                return parseHTTP(responses, pi, httpuc);
            }
        }
        throw e;
    }
}
// Reads and interprets the status line and headers: sniffs the first 8 bytes
// for "HTTP/1.x", parses headers, decides keep-alive policy, transparently
// skips 100-Continue (when ignoreContinue), and wraps serverInput in
// Chunked/KeepAlive/Metered streams as dictated by Transfer-Encoding and
// Content-Length. Returns true iff the response looked like valid HTTP.
private boolean parseHTTPHeader(MessageHeader responses, ProgressSource pi, HttpURLConnection httpuc)
throws IOException {
    /* If "HTTP/*" is found in the beginning, return true. Let
     * HttpURLConnection parse the mime header itself.
     *
     * If this isn't valid HTTP, then we don't try to parse a header
     * out of the beginning of the response into the responses,
     * and instead just queue up the output stream to it's very beginning.
     * This seems most reasonable, and is what the NN browser does.
     */
    keepAliveConnections = -1;
    keepAliveTimeout = 0;
    boolean ret = false;
    byte[] b = new byte[8];
    try {
        int nread = 0;
        // Peek at the first 8 bytes; reset below so header parsing re-reads them.
        serverInput.mark(10);
        while (nread < 8) {
            int r = serverInput.read(b, nread, 8 - nread);
            if (r < 0) {
                break;
            }
            nread += r;
        }
        String keep=null;
        ret = b[0] == 'H' && b[1] == 'T'
            && b[2] == 'T' && b[3] == 'P' && b[4] == '/' &&
            b[5] == '1' && b[6] == '.';
        serverInput.reset();
        if (ret) { // is valid HTTP - response started w/ "HTTP/1."
            responses.parseHeader(serverInput);
            // we've finished parsing http headers
            // check if there are any applicable cookies to set (in cache)
            CookieHandler cookieHandler = httpuc.getCookieHandler();
            if (cookieHandler != null) {
                URI uri = ParseUtil.toURI(url);
                // NOTE: That cast from Map shouldn't be necessary but
                // a bug in javac is triggered under certain circumstances
                // So we do put the cast in as a workaround until
                // it is resolved.
                if (uri != null)
                    cookieHandler.put(uri, responses.getHeaders());
            }
            /* decide if we're keeping alive:
             * This is a bit tricky. There's a spec, but most current
             * servers (10/1/96) that support this differ in dialects.
             * If the server/client misunderstand each other, the
             * protocol should fall back onto HTTP/1.0, no keep-alive.
             */
            if (usingProxy) { // not likely a proxy will return this
                keep = responses.findValue("Proxy-Connection");
            }
            if (keep == null) {
                keep = responses.findValue("Connection");
            }
            if (keep != null && keep.toLowerCase(Locale.US).equals("keep-alive")) {
                /* some servers, notably Apache1.1, send something like:
                 * "Keep-Alive: timeout=15, max=1" which we should respect.
                 */
                HeaderParser p = new HeaderParser(
                    responses.findValue("Keep-Alive"));
                if (p != null) {
                    /* default should be larger in case of proxy */
                    keepAliveConnections = p.findInt("max", usingProxy?50:5);
                    keepAliveTimeout = p.findInt("timeout", usingProxy?60:5);
                }
            } else if (b[7] != '0') {
                /*
                 * We're talking 1.1 or later. Keep persistent until
                 * the server says to close.
                 */
                if (keep != null) {
                    /*
                     * The only Connection token we understand is close.
                     * Paranoia: if there is any Connection header then
                     * treat as non-persistent.
                     */
                    keepAliveConnections = 1;
                } else {
                    keepAliveConnections = 5;
                }
            }
        } else if (nread != 8) {
            // Short read of the status line: server closed early. Retry once
            // under the same conditions as parseHTTP (no CONNECT, no
            // non-retryable POST).
            if (!failedOnce && requests != null) {
                failedOnce = true;
                if (getRequestMethod().equals("CONNECT") ||
                    (httpuc.getRequestMethod().equals("POST") &&
                     (!retryPostProp || streaming))) {
                    // do not retry the request
                } else {
                    closeServer();
                    cachedHttpClient = false;
                    openServer();
                    if (needsTunneling()) {
                        httpuc.doTunneling();
                    }
                    afterConnect();
                    writeRequests(requests, poster);
                    return parseHTTP(responses, pi, httpuc);
                }
            }
            throw new SocketException("Unexpected end of file from server");
        } else {
            // we can't vouche for what this is....
            responses.set("Content-type", "unknown/unknown");
        }
    } catch (IOException e) {
        throw e;
    }
    int code = -1;
    try {
        String resp;
        resp = responses.getValue(0);
        /* should have no leading/trailing LWS
         * expedite the typical case by assuming it has
         * form "HTTP/1.x <WS> 2XX <mumble>"
         */
        int ind;
        ind = resp.indexOf(' ');
        while(resp.charAt(ind) == ' ')
            ind++;
        code = Integer.parseInt(resp.substring(ind, ind + 3));
    } catch (Exception e) {}
    if (code == HTTP_CONTINUE && ignoreContinue) {
        // Interim 100 response: discard it and parse the real response.
        responses.reset();
        return parseHTTPHeader(responses, pi, httpuc);
    }
    long cl = -1;
    /*
     * Set things up to parse the entity body of the reply.
     * We should be smarter about avoid pointless work when
     * the HTTP method and response code indicate there will be
     * no entity body to parse.
     */
    String te = responses.findValue("Transfer-Encoding");
    if (te != null && te.equalsIgnoreCase("chunked")) {
        serverInput = new ChunkedInputStream(serverInput, this, responses);
        /*
         * If keep alive not specified then close after the stream
         * has completed.
         */
        if (keepAliveConnections <= 1) {
            keepAliveConnections = 1;
            keepingAlive = false;
        } else {
            keepingAlive = true;
        }
        failedOnce = false;
    } else {
        /*
         * If it's a keep alive connection then we will keep
         * (alive if :-
         * 1. content-length is specified, or
         * 2. "Not-Modified" or "No-Content" responses - RFC 2616 states that
         *    204 or 304 response must not include a message body.
         */
        String cls = responses.findValue("content-length");
        if (cls != null) {
            try {
                cl = Long.parseLong(cls);
            } catch (NumberFormatException e) {
                cl = -1;
            }
        }
        String requestLine = requests.getKey(0);
        if ((requestLine != null &&
             (requestLine.startsWith("HEAD"))) ||
            code == HttpURLConnection.HTTP_NOT_MODIFIED ||
            code == HttpURLConnection.HTTP_NO_CONTENT) {
            cl = 0;
        }
        if (keepAliveConnections > 1 &&
            (cl >= 0 ||
             code == HttpURLConnection.HTTP_NOT_MODIFIED ||
             code == HttpURLConnection.HTTP_NO_CONTENT)) {
            keepingAlive = true;
            failedOnce = false;
        } else if (keepingAlive) {
            /* Previously we were keeping alive, and now we're not. Remove
             * this from the cache (but only here, once) - otherwise we get
             * multiple removes and the cache count gets messed up.
             */
            keepingAlive=false;
        }
    }
    /* wrap a KeepAliveStream/MeteredStream around it if appropriate */
    if (cl > 0) {
        // In this case, content length is well known, so it is okay
        // to wrap the input stream with KeepAliveStream/MeteredStream.
        if (pi != null) {
            // Progress monitor is enabled
            pi.setContentType(responses.findValue("content-type"));
        }
        if (isKeepingAlive()) {
            // Wrap KeepAliveStream if keep alive is enabled.
            logFinest("KeepAlive stream used: " + url);
            serverInput = new KeepAliveStream(serverInput, pi, cl, this);
            failedOnce = false;
        }
        else {
            serverInput = new MeteredStream(serverInput, pi, cl);
        }
    }
    else if (cl == -1) {
        // In this case, content length is unknown - the input
        // stream would simply be a regular InputStream or
        // ChunkedInputStream.
        if (pi != null) {
            // Progress monitoring is enabled.
            pi.setContentType(responses.findValue("content-type"));
            // Wrap MeteredStream for tracking indeterministic
            // progress, even if the input stream is ChunkedInputStream.
            serverInput = new MeteredStream(serverInput, pi, cl);
        }
        else {
            // Progress monitoring is disabled, and there is no
            // need to wrap an unknown length input stream.
            // ** This is an no-op **
        }
    }
    else {
        if (pi != null)
            pi.finishTracking();
    }
    return ret;
}
    /**
     * Returns the input stream wired to the server response.
     * May be a wrapped (Chunked/KeepAlive/Metered) stream depending on the
     * response headers parsed earlier.
     */
    public synchronized InputStream getInputStream() {
        return serverInput;
    }
    /** Returns the output stream used to write the request to the server. */
    public OutputStream getOutputStream() {
        return serverOutput;
    }
@Override
public String toString() {
return getClass().getName()+"("+url+")";
}
    /**
     * True only when both the global HTTP keep-alive setting is enabled
     * and this particular connection has been marked reusable.
     */
    public final boolean isKeepingAlive() {
        return getHttpKeepAliveSet() && keepingAlive;
    }
    /** Associates a response-cache request with this client. */
    public void setCacheRequest(CacheRequest cacheRequest) {
        this.cacheRequest = cacheRequest;
    }
    /** Returns the cache request previously set via {@link #setCacheRequest}, or null. */
    CacheRequest getCacheRequest() {
        return cacheRequest;
    }
String getRequestMethod() {
if (requests != null) {
String requestLine = requests.getKey(0);
if (requestLine != null) {
return requestLine.split("\\s+")[0];
}
}
return "";
}
    /**
     * Intentionally a no-op: closing the underlying file descriptor is left to
     * the stream's own finalizer, so this override must not touch the socket.
     */
    @Override
    protected void finalize() throws Throwable {
        // This should do nothing. The stream finalizer will
        // close the fd.
    }
    /**
     * Controls whether a failed request may be retried.
     * Setting {@code true} marks the request as already failed once, which
     * suppresses a further retry.
     */
    public void setDoNotRetry(boolean value) {
        // failedOnce is used to determine if a request should be retried.
        failedOnce = value;
    }
    /** Sets whether interim "100 Continue" responses should be skipped over. */
    public void setIgnoreContinue(boolean value) {
        ignoreContinue = value;
    }
    /* Use only on connections in error. */
    @Override
    public void closeServer() {
        try {
            // Drop the keep-alive flag first so this connection can never be
            // handed back to the connection cache after the close.
            keepingAlive = false;
            serverSocket.close();
        // Best-effort close: any failure here is deliberately ignored because
        // the connection is already in an error state.
        } catch (Exception e) {}
    }
/**
* @return the proxy host being used for this client, or null
* if we're not going through a proxy
*/
public String getProxyHostUsed() {
if (!usingProxy) {
return null;
} else {
return ((InetSocketAddress)proxy.address()).getHostString();
}
}
/**
* @return the proxy port being used for this client. Meaningless
* if getProxyHostUsed() gives null.
*/
public int getProxyPortUsed() {
if (usingProxy)
return ((InetSocketAddress)proxy.address()).getPort();
return -1;
}
}
|
apache/incubator-kie-drools | 36,190 | kie-pmml-trusty/kie-pmml-models/kie-pmml-models-regression/kie-pmml-models-regression-compiler/src/test/java/org/kie/pmml/models/regression/compiler/factories/KiePMMLRegressionTableFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kie.pmml.models.regression.compiler.factories;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.expr.CastExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.LambdaExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.MethodReferenceExpr;
import com.github.javaparser.ast.expr.NullLiteralExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import org.assertj.core.data.Offset;
import org.dmg.pmml.DataDictionary;
import org.dmg.pmml.DataField;
import org.dmg.pmml.MiningField;
import org.dmg.pmml.MiningSchema;
import org.dmg.pmml.OpType;
import org.dmg.pmml.PMML;
import org.dmg.pmml.regression.CategoricalPredictor;
import org.dmg.pmml.regression.NumericPredictor;
import org.dmg.pmml.regression.PredictorTerm;
import org.dmg.pmml.regression.RegressionModel;
import org.dmg.pmml.regression.RegressionTable;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.kie.pmml.api.enums.RESULT_FEATURE;
import org.kie.pmml.api.iinterfaces.SerializableFunction;
import org.kie.pmml.commons.model.KiePMMLOutputField;
import org.kie.pmml.compiler.api.dto.CommonCompilationDTO;
import org.kie.pmml.compiler.api.testutils.PMMLModelTestUtils;
import org.kie.pmml.compiler.commons.mocks.PMMLCompilationContextMock;
import org.kie.pmml.compiler.commons.utils.JavaParserUtils;
import org.kie.pmml.models.regression.compiler.dto.RegressionCompilationDTO;
import org.kie.pmml.models.regression.model.KiePMMLRegressionTable;
import org.kie.pmml.models.regression.model.tuples.KiePMMLTableSourceCategory;
import static java.util.stream.Collectors.groupingBy;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.kie.pmml.commons.utils.KiePMMLModelUtils.getGeneratedClassName;
import static org.kie.pmml.commons.utils.KiePMMLModelUtils.getSanitizedVariableName;
import static org.kie.pmml.compiler.commons.testutils.CodegenTestUtils.commonValidateCompilation;
import static org.kie.pmml.compiler.commons.testutils.CodegenTestUtils.commonValidateCompilationWithImports;
import static org.kie.pmml.compiler.commons.utils.JavaParserUtils.getFromFileName;
import static org.kie.pmml.models.regression.compiler.factories.KiePMMLRegressionTableFactory.GETKIEPMML_TABLE;
import static org.kie.pmml.models.regression.compiler.factories.KiePMMLRegressionTableFactory.KIE_PMML_REGRESSION_TABLE_TEMPLATE;
import static org.kie.pmml.models.regression.compiler.factories.KiePMMLRegressionTableFactory.KIE_PMML_REGRESSION_TABLE_TEMPLATE_JAVA;
import static org.kie.pmml.models.regression.compiler.factories.KiePMMLRegressionTableFactory.SUPPORTED_NORMALIZATION_METHODS;
import static org.kie.pmml.models.regression.compiler.factories.KiePMMLRegressionTableFactory.UNSUPPORTED_NORMALIZATION_METHODS;
import static org.drools.util.FileUtils.getFileContent;
public class KiePMMLRegressionTableFactoryTest extends AbstractKiePMMLRegressionTableRegressionFactoryTest {
private static final String PACKAGE_NAME = "packagename";
private static final String TEST_01_SOURCE = "KiePMMLRegressionTableFactoryTest_01.txt";
private static final String TEST_02_SOURCE = "KiePMMLRegressionTableFactoryTest_02.txt";
private static final String TEST_03_SOURCE = "KiePMMLRegressionTableFactoryTest_03.txt";
private static final String TEST_04_SOURCE = "KiePMMLRegressionTableFactoryTest_04.txt";
private static final String TEST_05_SOURCE = "KiePMMLRegressionTableFactoryTest_05.txt";
private static final String TEST_06_SOURCE = "KiePMMLRegressionTableFactoryTest_06.txt";
private static final String TEST_07_SOURCE = "KiePMMLRegressionTableFactoryTest_07.txt";
private static CompilationUnit COMPILATION_UNIT;
private static ClassOrInterfaceDeclaration MODEL_TEMPLATE;
private static MethodDeclaration STATIC_GETTER_METHOD;
@BeforeAll
public static void setup() {
COMPILATION_UNIT = getFromFileName(KIE_PMML_REGRESSION_TABLE_TEMPLATE_JAVA);
MODEL_TEMPLATE = COMPILATION_UNIT.getClassByName(KIE_PMML_REGRESSION_TABLE_TEMPLATE).get();
STATIC_GETTER_METHOD = MODEL_TEMPLATE.getMethodsByName(GETKIEPMML_TABLE).get(0);
}
@Test
void getRegressionTables() {
regressionTable = getRegressionTable(3.5, "professional");
RegressionTable regressionTable2 = getRegressionTable(3.9, "hobby");
RegressionModel regressionModel = new RegressionModel();
regressionModel.setNormalizationMethod(RegressionModel.NormalizationMethod.CAUCHIT);
regressionModel.addRegressionTables(regressionTable, regressionTable2);
regressionModel.setModelName(getGeneratedClassName("RegressionModel"));
String targetField = "targetField";
DataField dataField = new DataField();
dataField.setName(targetField);
dataField.setOpType(OpType.CATEGORICAL);
DataDictionary dataDictionary = new DataDictionary();
dataDictionary.addDataFields(dataField);
MiningField miningField = new MiningField();
miningField.setUsageType(MiningField.UsageType.TARGET);
miningField.setName(dataField.getName());
MiningSchema miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
regressionModel.setMiningSchema(miningSchema);
PMML pmml = new PMML();
pmml.setDataDictionary(dataDictionary);
pmml.addModels(regressionModel);
final CommonCompilationDTO<RegressionModel> source =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmml,
regressionModel,
new PMMLCompilationContextMock(),
"FILENAME");
final RegressionCompilationDTO compilationDTO =
RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(source,
regressionModel.getRegressionTables(),
regressionModel.getNormalizationMethod());
Map<String, KiePMMLRegressionTable> retrieved =
KiePMMLRegressionTableFactory.getRegressionTables(compilationDTO);
assertThat(retrieved).isNotNull();
assertThat(retrieved).hasSameSizeAs(regressionModel.getRegressionTables());
regressionModel.getRegressionTables().forEach(regrTabl -> {
assertThat(retrieved).containsKey(regrTabl.getTargetCategory().toString());
commonEvaluateRegressionTable(retrieved.get(regrTabl.getTargetCategory().toString()), regrTabl);
});
}
@Test
void getRegressionTable() {
regressionTable = getRegressionTable(3.5, "professional");
RegressionModel regressionModel = new RegressionModel();
regressionModel.setNormalizationMethod(RegressionModel.NormalizationMethod.CAUCHIT);
regressionModel.addRegressionTables(regressionTable);
regressionModel.setModelName(getGeneratedClassName("RegressionModel"));
String targetField = "targetField";
DataField dataField = new DataField();
dataField.setName(targetField);
dataField.setOpType(OpType.CATEGORICAL);
DataDictionary dataDictionary = new DataDictionary();
dataDictionary.addDataFields(dataField);
MiningField miningField = new MiningField();
miningField.setUsageType(MiningField.UsageType.TARGET);
miningField.setName(dataField.getName());
MiningSchema miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
regressionModel.setMiningSchema(miningSchema);
PMML pmml = new PMML();
pmml.setDataDictionary(dataDictionary);
pmml.addModels(regressionModel);
final CommonCompilationDTO<RegressionModel> source =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmml,
regressionModel,
new PMMLCompilationContextMock(),
"FILENAME");
final RegressionCompilationDTO compilationDTO =
RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(source,
new ArrayList<>(),
regressionModel.getNormalizationMethod());
KiePMMLRegressionTable retrieved = KiePMMLRegressionTableFactory.getRegressionTable(regressionTable,
compilationDTO);
assertThat(retrieved).isNotNull();
commonEvaluateRegressionTable(retrieved, regressionTable);
}
@Test
void getRegressionTableBuilders() {
regressionTable = getRegressionTable(3.5, "professional");
RegressionModel regressionModel = new RegressionModel();
regressionModel.setNormalizationMethod(RegressionModel.NormalizationMethod.CAUCHIT);
regressionModel.addRegressionTables(regressionTable);
regressionModel.setModelName(getGeneratedClassName("RegressionModel"));
String targetField = "targetField";
DataField dataField = new DataField();
dataField.setName(targetField);
dataField.setOpType(OpType.CATEGORICAL);
DataDictionary dataDictionary = new DataDictionary();
dataDictionary.addDataFields(dataField);
MiningField miningField = new MiningField();
miningField.setUsageType(MiningField.UsageType.TARGET);
miningField.setName(dataField.getName());
MiningSchema miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
regressionModel.setMiningSchema(miningSchema);
PMML pmml = new PMML();
pmml.setDataDictionary(dataDictionary);
pmml.addModels(regressionModel);
final CommonCompilationDTO<RegressionModel> source =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmml,
regressionModel,
new PMMLCompilationContextMock(),
"FILENAME");
final RegressionCompilationDTO compilationDTO =
RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(source,
new ArrayList<>(),
regressionModel.getNormalizationMethod());
Map<String, KiePMMLTableSourceCategory> retrieved =
KiePMMLRegressionTableFactory.getRegressionTableBuilders(compilationDTO);
assertThat(retrieved).isNotNull();
retrieved.values().forEach(kiePMMLTableSourceCategory -> commonValidateKiePMMLRegressionTable(kiePMMLTableSourceCategory.getSource()));
}
@Test
void getRegressionTableBuilder() {
regressionTable = getRegressionTable(3.5, "professional");
RegressionModel regressionModel = new RegressionModel();
regressionModel.setNormalizationMethod(RegressionModel.NormalizationMethod.CAUCHIT);
regressionModel.addRegressionTables(regressionTable);
regressionModel.setModelName(getGeneratedClassName("RegressionModel"));
String targetField = "targetField";
DataField dataField = new DataField();
dataField.setName(targetField);
dataField.setOpType(OpType.CATEGORICAL);
DataDictionary dataDictionary = new DataDictionary();
dataDictionary.addDataFields(dataField);
MiningField miningField = new MiningField();
miningField.setUsageType(MiningField.UsageType.TARGET);
miningField.setName(dataField.getName());
MiningSchema miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
regressionModel.setMiningSchema(miningSchema);
PMML pmml = new PMML();
pmml.setDataDictionary(dataDictionary);
pmml.addModels(regressionModel);
final CommonCompilationDTO<RegressionModel> source =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmml,
regressionModel,
new PMMLCompilationContextMock(),
"FILENAME");
final RegressionCompilationDTO compilationDTO =
RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(source,
new ArrayList<>(),
regressionModel.getNormalizationMethod());
Map.Entry<String, String> retrieved = KiePMMLRegressionTableFactory.getRegressionTableBuilder(regressionTable
, compilationDTO);
assertThat(retrieved).isNotNull();
Map<String, String> sources = new HashMap<>();
sources.put(retrieved.getKey(), retrieved.getValue());
commonValidateCompilation(sources);
}
@Test
void getNumericPredictorsMap() {
final List<NumericPredictor> numericPredictors = IntStream.range(0, 3).mapToObj(index -> {
String predictorName = "predictorName-" + index;
double coefficient = 1.23 * index;
return PMMLModelTestUtils.getNumericPredictor(predictorName, index, coefficient);
}).collect(Collectors.toList());
Map<String, SerializableFunction<Double, Double>> retrieved =
KiePMMLRegressionTableFactory.getNumericPredictorsMap(numericPredictors);
assertThat(retrieved).hasSameSizeAs(numericPredictors);
}
@Test
void getNumericPredictorEntryWithExponent() {
String predictorName = "predictorName";
int exponent = 2;
double coefficient = 1.23;
NumericPredictor numericPredictor = PMMLModelTestUtils.getNumericPredictor(predictorName, exponent,
coefficient);
SerializableFunction<Double, Double> retrieved =
KiePMMLRegressionTableFactory.getNumericPredictorEntry(numericPredictor);
assertThat(retrieved).isNotNull();
}
@Test
void getNumericPredictorEntryWithoutExponent() {
String predictorName = "predictorName";
int exponent = 1;
double coefficient = 1.23;
NumericPredictor numericPredictor = PMMLModelTestUtils.getNumericPredictor(predictorName, exponent,
coefficient);
SerializableFunction<Double, Double> retrieved =
KiePMMLRegressionTableFactory.getNumericPredictorEntry(numericPredictor);
assertThat(retrieved).isNotNull();
}
@Test
void getCategoricalPredictorsMap() {
final List<CategoricalPredictor> categoricalPredictors = IntStream.range(0, 3).mapToObj(index ->
IntStream.range(0,
3).mapToObj(i -> {
String predictorName = "predictorName-" + index;
double coefficient = 1.23 * i;
return PMMLModelTestUtils.getCategoricalPredictor(predictorName, i, coefficient);
})
.collect(Collectors.toList())).reduce((categoricalPredictors1, categoricalPredictors2) -> {
List<CategoricalPredictor> toReturn = new ArrayList<>();
toReturn.addAll(categoricalPredictors1);
toReturn.addAll(categoricalPredictors2);
return toReturn;
}).get();
Map<String, SerializableFunction<String, Double>> retrieved =
KiePMMLRegressionTableFactory.getCategoricalPredictorsMap(categoricalPredictors);
final Map<String, List<CategoricalPredictor>> groupedCollectors = categoricalPredictors.stream()
.collect(groupingBy(categoricalPredictor ->categoricalPredictor.getField()));
assertThat(retrieved).hasSameSizeAs(groupedCollectors);
groupedCollectors.keySet().forEach(predictName -> assertThat(retrieved).containsKey(predictName));
}
@Test
void getGroupedCategoricalPredictorMap() {
final List<CategoricalPredictor> categoricalPredictors = new ArrayList<>();
for (int i = 0; i < 3; i++) {
String predictorName = "predictorName-" + i;
double coefficient = 1.23 * i;
categoricalPredictors.add(PMMLModelTestUtils.getCategoricalPredictor(predictorName, i, coefficient));
}
Map<String, Double> retrieved =
KiePMMLRegressionTableFactory.getGroupedCategoricalPredictorMap(categoricalPredictors);
assertThat(retrieved).isNotNull();
assertThat(retrieved).hasSameSizeAs(categoricalPredictors);
categoricalPredictors.forEach(categoricalPredictor ->
{
String key = categoricalPredictor.getValue().toString();
assertThat(retrieved).containsKey(key);
assertThat(retrieved.get(key)).isCloseTo(categoricalPredictor.getCoefficient().doubleValue(), Offset.offset(0.0));
});
}
@Test
void getPredictorTermsMap() {
final List<PredictorTerm> predictorTerms = IntStream.range(0, 3).mapToObj(index -> {
String predictorName = "predictorName-" + index;
double coefficient = 1.23 * index;
String fieldRef = "fieldRef-" + index;
return PMMLModelTestUtils.getPredictorTerm(predictorName, coefficient,
Collections.singletonList(fieldRef));
}).collect(Collectors.toList());
Map<String, SerializableFunction<Map<String, Object>, Double>> retrieved =
KiePMMLRegressionTableFactory.getPredictorTermsMap(predictorTerms);
assertThat(retrieved).hasSameSizeAs(predictorTerms);
IntStream.range(0, predictorTerms.size()).forEach(index -> {
PredictorTerm predictorTerm = predictorTerms.get(index);
assertThat(retrieved).containsKey(predictorTerm.getName());
});
}
@Test
void getPredictorTermSerializableFunction() {
String predictorName = "predictorName";
double coefficient = 23.12;
String fieldRef = "fieldRef";
PredictorTerm predictorTerm = PMMLModelTestUtils.getPredictorTerm(predictorName, coefficient,
Collections.singletonList(fieldRef));
SerializableFunction<Map<String, Object>, Double> retrieved =
KiePMMLRegressionTableFactory.getPredictorTermSerializableFunction(predictorTerm);
assertThat(retrieved).isNotNull();
}
@Test
void getResultUpdaterUnsupportedFunction() {
UNSUPPORTED_NORMALIZATION_METHODS.forEach(normalizationMethod ->
assertThat(KiePMMLRegressionTableFactory.getResultUpdaterFunction(normalizationMethod)).isNull());
}
@Test
void getResultUpdaterSupportedFunction() {
SUPPORTED_NORMALIZATION_METHODS.forEach(normalizationMethod ->
assertThat(KiePMMLRegressionTableFactory.getResultUpdaterFunction(normalizationMethod)).isNotNull());
}
@Test
void setStaticGetter() throws IOException {
regressionTable = getRegressionTable(3.5, "professional");
RegressionModel regressionModel = new RegressionModel();
regressionModel.setNormalizationMethod(RegressionModel.NormalizationMethod.CAUCHIT);
regressionModel.addRegressionTables(regressionTable);
regressionModel.setModelName(getGeneratedClassName("RegressionModel"));
String targetField = "targetField";
DataField dataField = new DataField();
dataField.setName(targetField);
dataField.setOpType(OpType.CATEGORICAL);
DataDictionary dataDictionary = new DataDictionary();
dataDictionary.addDataFields(dataField);
MiningField miningField = new MiningField();
miningField.setUsageType(MiningField.UsageType.TARGET);
miningField.setName(dataField.getName());
MiningSchema miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
regressionModel.setMiningSchema(miningSchema);
PMML pmml = new PMML();
pmml.setDataDictionary(dataDictionary);
pmml.addModels(regressionModel);
String variableName = "variableName";
final CommonCompilationDTO<RegressionModel> source =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmml,
regressionModel,
new PMMLCompilationContextMock(),
"FILENAME");
final RegressionCompilationDTO compilationDTO =
RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(source,
new ArrayList<>(),
regressionModel.getNormalizationMethod());
final MethodDeclaration staticGetterMethod = STATIC_GETTER_METHOD.clone();
KiePMMLRegressionTableFactory.setStaticGetter(regressionTable,
compilationDTO,
staticGetterMethod,
variableName);
String text = getFileContent(TEST_06_SOURCE);
MethodDeclaration expected = JavaParserUtils.parseMethod(text);
assertThat(staticGetterMethod.toString()).isEqualTo(expected.toString());
assertThat(JavaParserUtils.equalsNode(expected, staticGetterMethod)).isTrue();
List<Class<?>> imports = Arrays.asList(AtomicReference.class,
Collections.class,
Arrays.class,
List.class,
Map.class,
KiePMMLRegressionTable.class,
SerializableFunction.class);
commonValidateCompilationWithImports(staticGetterMethod, imports);
}
@Test
void getResultUpdaterExpressionWithSupportedMethods() {
SUPPORTED_NORMALIZATION_METHODS.forEach(normalizationMethod -> {
Expression retrieved =
KiePMMLRegressionTableFactory.getResultUpdaterExpression(normalizationMethod);
try {
String text = getFileContent(TEST_03_SOURCE);
Expression expected = JavaParserUtils.parseExpression(String.format(text,
normalizationMethod.name()));
assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
} catch (IOException e) {
fail(e.getMessage());
}
});
}
@Test
void getResultUpdaterExpression() {
UNSUPPORTED_NORMALIZATION_METHODS.forEach(normalizationMethod -> {
Expression retrieved =
KiePMMLRegressionTableFactory.getResultUpdaterExpression(normalizationMethod);
assertThat(retrieved).isInstanceOf(NullLiteralExpr.class);
});
}
@Test
void getResultUpdaterSupportedExpression() throws IOException {
MethodReferenceExpr retrieved =
KiePMMLRegressionTableFactory.getResultUpdaterSupportedExpression(RegressionModel.NormalizationMethod.CAUCHIT);
String text = getFileContent(TEST_03_SOURCE);
Expression expected = JavaParserUtils.parseExpression(String.format(text,
RegressionModel.NormalizationMethod.CAUCHIT.name()));
assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
}
@Test
void getNumericPredictorsExpressions() {
final List<NumericPredictor> numericPredictors = IntStream.range(0, 3).mapToObj(index -> {
String predictorName = "predictorName-" + index;
double coefficient = 1.23 * index;
return PMMLModelTestUtils.getNumericPredictor(predictorName, index, coefficient);
}).collect(Collectors.toList());
Map<String, Expression> retrieved =
KiePMMLRegressionTableFactory.getNumericPredictorsExpressions(numericPredictors);
assertThat(retrieved).hasSameSizeAs(numericPredictors);
}
@Test
void getNumericPredictorExpressionWithExponent() throws IOException {
String predictorName = "predictorName";
int exponent = 2;
double coefficient = 1.23;
NumericPredictor numericPredictor = PMMLModelTestUtils.getNumericPredictor(predictorName, exponent,
coefficient);
CastExpr retrieved = KiePMMLRegressionTableFactory.getNumericPredictorExpression(numericPredictor);
String text = getFileContent(TEST_01_SOURCE);
Expression expected = JavaParserUtils.parseExpression(String.format(text, coefficient, exponent));
assertThat(retrieved).isEqualTo(expected);
}
@Test
void getNumericPredictorExpressionWithoutExponent() throws IOException {
String predictorName = "predictorName";
int exponent = 1;
double coefficient = 1.23;
NumericPredictor numericPredictor = PMMLModelTestUtils.getNumericPredictor(predictorName, exponent,
coefficient);
CastExpr retrieved = KiePMMLRegressionTableFactory.getNumericPredictorExpression(numericPredictor);
String text = getFileContent(TEST_02_SOURCE);
Expression expected = JavaParserUtils.parseExpression(String.format(text, coefficient));
assertThat(retrieved).isEqualTo(expected);
}
@Test
void getCategoricalPredictorsExpressions() {
final List<CategoricalPredictor> categoricalPredictors = IntStream.range(0, 3).mapToObj(index ->
IntStream.range(0,
3).mapToObj(i -> {
String predictorName = "predictorName-" + index;
double coefficient = 1.23 * i;
return PMMLModelTestUtils.getCategoricalPredictor(predictorName, i, coefficient);
})
.collect(Collectors.toList())).reduce((categoricalPredictors1, categoricalPredictors2) -> {
List<CategoricalPredictor> toReturn = new ArrayList<>();
toReturn.addAll(categoricalPredictors1);
toReturn.addAll(categoricalPredictors2);
return toReturn;
}).get();
final BlockStmt body = new BlockStmt();
Map<String, Expression> retrieved =
KiePMMLRegressionTableFactory.getCategoricalPredictorsExpressions(categoricalPredictors,
body,
"variableName");
assertThat(retrieved).hasSize(3);
final Map<String, List<CategoricalPredictor>> groupedCollectors = categoricalPredictors.stream()
.collect(groupingBy(categoricalPredictor ->categoricalPredictor.getField()));
groupedCollectors.values().forEach(categoricalPredictors12 -> commonEvaluateCategoryPredictors(body,
categoricalPredictors12, "variableName"));
}
@Test
void populateWithGroupedCategoricalPredictorMap() throws IOException {
final List<CategoricalPredictor> categoricalPredictors = new ArrayList<>();
for (int i = 0; i < 3; i++) {
String predictorName = "predictorName-" + i;
double coefficient = 1.23 * i;
categoricalPredictors.add(PMMLModelTestUtils.getCategoricalPredictor(predictorName, i, coefficient));
}
final BlockStmt toPopulate = new BlockStmt();
final String categoricalPredictorMapName = "categoricalPredictorMapName";
KiePMMLRegressionTableFactory.populateWithGroupedCategoricalPredictorMap(categoricalPredictors,
toPopulate,
categoricalPredictorMapName);
String text = getFileContent(TEST_04_SOURCE);
BlockStmt expected = JavaParserUtils.parseBlock(String.format(text,
categoricalPredictorMapName,
categoricalPredictors.get(0).getValue(),
categoricalPredictors.get(0).getCoefficient(),
categoricalPredictors.get(1).getValue(),
categoricalPredictors.get(1).getCoefficient(),
categoricalPredictors.get(2).getValue(),
categoricalPredictors.get(2).getCoefficient()));
assertThat(JavaParserUtils.equalsNode(expected, toPopulate)).isTrue();
}
@Test
void getCategoricalPredictorExpression() throws IOException {
final String categoricalPredictorMapName = "categoricalPredictorMapName";
CastExpr retrieved =
KiePMMLRegressionTableFactory.getCategoricalPredictorExpression(categoricalPredictorMapName);
String text = getFileContent(TEST_05_SOURCE);
Expression expected = JavaParserUtils.parseExpression(String.format(text, categoricalPredictorMapName));
assertThat(retrieved).isEqualTo(expected);
}
@Test
void getPredictorTermFunctions() {
final List<PredictorTerm> predictorTerms = IntStream.range(0, 3).mapToObj(index -> {
String predictorName = "predictorName-" + index;
double coefficient = 1.23 * index;
String fieldRef = "fieldRef-" + index;
return PMMLModelTestUtils.getPredictorTerm(predictorName, coefficient,
Collections.singletonList(fieldRef));
}).collect(Collectors.toList());
Map<String, Expression> retrieved =
KiePMMLRegressionTableFactory.getPredictorTermFunctions(predictorTerms);
assertThat(retrieved).hasSameSizeAs(predictorTerms);
IntStream.range(0, predictorTerms.size()).forEach(index -> {
PredictorTerm predictorTerm = predictorTerms.get(index);
assertThat(retrieved).containsKey(predictorTerm.getName());
});
}
@Test
void getPredictorTermFunction() throws IOException {
String predictorName = "predictorName";
double coefficient = 23.12;
String fieldRef = "fieldRef";
PredictorTerm predictorTerm = PMMLModelTestUtils.getPredictorTerm(predictorName, coefficient,
Collections.singletonList(fieldRef));
LambdaExpr retrieved = KiePMMLRegressionTableFactory.getPredictorTermFunction(predictorTerm);
String text = getFileContent(TEST_07_SOURCE);
Expression expected = JavaParserUtils.parseExpression(String.format(text, fieldRef, coefficient));
assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
}
private void commonEvaluateRegressionTable(KiePMMLRegressionTable retrieved, RegressionTable source) {
Map<String, SerializableFunction<Double, Double>> numericFunctionMap = retrieved.getNumericFunctionMap();
assertThat(numericFunctionMap).hasSameSizeAs(source.getNumericPredictors());
source.getNumericPredictors().forEach(numericPredictor -> assertThat(numericFunctionMap).containsKey(numericPredictor.getField()));
Map<String, SerializableFunction<String, Double>> categoricalFunctionMap =
retrieved.getCategoricalFunctionMap();
Map<String, List<CategoricalPredictor>> groupedCollectors = categoricalPredictors.stream()
.collect(groupingBy(categoricalPredictor ->categoricalPredictor.getField()));
assertThat(categoricalFunctionMap).hasSameSizeAs(groupedCollectors);
groupedCollectors.keySet().forEach(categorical -> assertThat(categoricalFunctionMap).containsKey(categorical));
Map<String, SerializableFunction<Map<String, Object>, Double>> predictorTermsFunctionMap =
retrieved.getPredictorTermsFunctionMap();
assertThat(predictorTermsFunctionMap).hasSameSizeAs(source.getPredictorTerms());
source.getPredictorTerms().forEach(predictorTerm -> assertThat(predictorTermsFunctionMap).containsKey(predictorTerm.getName()));
}
private void commonEvaluateCategoryPredictors(final BlockStmt toVerify,
final List<CategoricalPredictor> categoricalPredictors,
final String variableName) {
for (int i = 0; i < categoricalPredictors.size(); i++) {
CategoricalPredictor categoricalPredictor = categoricalPredictors.get(i);
String expectedVariableName =
getSanitizedVariableName(String.format("%sMap", variableName)) + "_" + i;
assertThat(toVerify.getStatements()
.stream()
.anyMatch(statement -> {
String expected = String.format(
"%s.put(\"%s\", %s);",
expectedVariableName,
categoricalPredictor.getValue(),
categoricalPredictor.getCoefficient());
return statement instanceof ExpressionStmt &&
((ExpressionStmt) statement).getExpression() instanceof MethodCallExpr &&
statement.toString().equals(expected);
})).isTrue();
}
}
private KiePMMLOutputField getOutputField(String name, RESULT_FEATURE resultFeature, String targetField) {
return KiePMMLOutputField.builder(name, Collections.emptyList())
.withResultFeature(resultFeature)
.withTargetField(targetField)
.build();
}
} |
apache/jackrabbit-oak | 36,049 | oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.segment.azure;
import com.azure.core.util.BinaryData;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.models.BlobItem;
import com.azure.storage.blob.models.BlobStorageException;
import com.azure.storage.blob.models.ListBlobsOptions;
import com.azure.storage.blob.specialized.BlobLeaseClient;
import com.azure.storage.blob.specialized.BlobLeaseClientBuilder;
import com.azure.storage.blob.specialized.BlockBlobClient;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.Buffer;
import org.apache.jackrabbit.oak.segment.SegmentId;
import org.apache.jackrabbit.oak.segment.SegmentNodeStore;
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.apache.jackrabbit.oak.segment.file.ReadOnlyFileStore;
import org.apache.jackrabbit.oak.segment.file.tar.TarPersistence;
import org.apache.jackrabbit.oak.segment.remote.WriteAccessController;
import org.apache.jackrabbit.oak.segment.spi.RepositoryNotReachableException;
import org.apache.jackrabbit.oak.segment.spi.monitor.FileStoreMonitorAdapter;
import org.apache.jackrabbit.oak.segment.spi.monitor.IOMonitorAdapter;
import org.apache.jackrabbit.oak.segment.spi.monitor.RemoteStoreMonitorAdapter;
import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveManager;
import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveReader;
import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveWriter;
import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentNodeStorePersistence;
import org.apache.jackrabbit.oak.segment.spi.persistence.persistentcache.AbstractPersistentCache;
import org.apache.jackrabbit.oak.segment.spi.persistence.persistentcache.CachingPersistence;
import org.apache.jackrabbit.oak.segment.spi.persistence.persistentcache.PersistentCache;
import org.apache.jackrabbit.oak.segment.spi.persistence.split.SplitPersistence;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.Assert;
import org.junit.contrib.java.lang.system.ProvideSystemProperty;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.*;
/**
 * Integration tests for the Azure-backed {@code SegmentArchiveManager} and the
 * surrounding {@link AzurePersistence} machinery, executed against an Azurite
 * storage-emulator Docker container. Covers archive recovery, backup, unclean
 * shutdown, read-only stores, repository-lock loss, and "deleted" markers.
 */
public class AzureArchiveManagerTest {
    // One Azurite Docker container shared by every test in this class.
    @ClassRule
    public static AzuriteDockerRule azurite = new AzuriteDockerRule();
    // Local scratch space for file-store directories, rooted under "target".
    @Rule
    public TemporaryFolder folder = new TemporaryFolder(new File("target"));
    // Three views on the same container: normal read, write, and a client with
    // retries disabled (used to surface failures immediately).
    private BlobContainerClient readBlobContainerClient;
    private BlobContainerClient writeBlobContainerClient;
    private BlobContainerClient noRetryBlobContainerClient;
    private AzurePersistence azurePersistence;
    private WriteAccessController writeAccessController;
    /**
     * Creates fresh clients for the "oak-test" container and an
     * {@link AzurePersistence} rooted at the "oak" directory, with writing enabled.
     */
    @Before
    public void setup() throws BlobStorageException, InvalidKeyException, URISyntaxException {
        readBlobContainerClient = azurite.getReadBlobContainerClient("oak-test");
        writeBlobContainerClient = azurite.getWriteBlobContainerClient("oak-test");
        noRetryBlobContainerClient = azurite.getNoRetryBlobContainerClient("oak-test");
        writeAccessController = new WriteAccessController();
        writeAccessController.enableWriting();
        azurePersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        azurePersistence.setWriteAccessController(writeAccessController);
    }
    // Shorten the repository-lock lease timings (values in seconds) so that the
    // lease-expiry scenario in testWriteAfterLosingRepoLock completes quickly.
    @Rule
    public final ProvideSystemProperty systemPropertyRule = new ProvideSystemProperty(AzureRepositoryLock.LEASE_DURATION_PROP, "15")
            .and(AzureRepositoryLock.RENEWAL_INTERVAL_PROP, "3")
            .and(AzureRepositoryLock.TIME_TO_WAIT_BEFORE_WRITE_BLOCK_PROP, "9");
    /**
     * Writes 10 segments, deletes the blob of segment 0005, and verifies that
     * recovery returns only the 5 segments preceding the gap, in order.
     */
    @Test
    public void testRecovery() throws BlobStorageException, IOException {
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        SegmentArchiveWriter writer = manager.create("data00000a.tar");
        List<UUID> uuids = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            UUID u = UUID.randomUUID();
            writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
            uuids.add(u);
        }
        writer.flush();
        writer.close();
        // Remove segment 0005 to simulate a partially written / corrupted archive.
        readBlobContainerClient.getBlobClient("oak/data00000a.tar/0005." + uuids.get(5).toString()).delete();
        LinkedHashMap<UUID, byte[]> recovered = new LinkedHashMap<>();
        manager.recoverEntries("data00000a.tar", recovered);
        assertEquals(new ArrayList<>(uuids.subList(0, 5)), new ArrayList<>(recovered.keySet()));
    }
    /**
     * After recovery, backing up the archive must keep the recovered segments
     * (0000-0004) and drop everything at and after the deleted segment.
     */
    @Test
    public void testBackupWithRecoveredEntries() throws BlobStorageException, IOException {
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        SegmentArchiveWriter writer = manager.create("data00000a.tar");
        List<UUID> uuids = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            UUID u = UUID.randomUUID();
            writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
            uuids.add(u);
        }
        writer.flush();
        writer.close();
        readBlobContainerClient.getBlobClient("oak/data00000a.tar/0005." + uuids.get(5).toString()).delete();
        LinkedHashMap<UUID, byte[]> recovered = new LinkedHashMap<>();
        manager.recoverEntries("data00000a.tar", recovered);
        manager.backup("data00000a.tar", "data00000a.tar.bak", recovered.keySet());
        for (int i = 0; i <= 4; i++) {
            assertTrue(readBlobContainerClient.getBlobClient("oak/data00000a.tar/000" + i + "." + uuids.get(i)).exists());
        }
        for (int i = 5; i <= 9; i++) {
            assertFalse(String.format("Segment %s.??? should have been deleted.", "oak/data00000a.tar/000" + i), readBlobContainerClient.getBlobClient("oak/data00000a.tar/000" + i + "." + uuids.get(i)).exists());
        }
    }
    /**
     * Simulates an unclean shutdown by deleting the "closed" marker and the
     * .brf/.gph index blobs; reopening the store must still find the content.
     */
    @Test
    public void testUncleanStop() throws IOException, InvalidFileStoreVersionException, CommitFailedException, BlobStorageException {
        AzurePersistence p = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        FileStore fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        NodeBuilder builder = segmentNodeStore.getRoot().builder();
        builder.setProperty("foo", "bar");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.close();
        readBlobContainerClient.getBlobClient("oak/data00000a.tar/closed").delete();
        readBlobContainerClient.getBlobClient("oak/data00000a.tar/data00000a.tar.brf").delete();
        readBlobContainerClient.getBlobClient("oak/data00000a.tar/data00000a.tar.gph").delete();
        fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        assertEquals("bar", segmentNodeStore.getRoot().getString("foo"));
        fs.close();
    }
    /**
     * Unclean stop leaving the newest archive effectively empty (its only
     * segment removed) must not prevent the store from being reopened.
     */
    @Test
    // see OAK-8566
    public void testUncleanStopWithEmptyArchive() throws IOException, InvalidFileStoreVersionException, CommitFailedException, BlobStorageException {
        AzurePersistence p = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        FileStore fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        NodeBuilder builder = segmentNodeStore.getRoot().builder();
        builder.setProperty("foo", "bar");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.close();
        // make sure there are 2 archives
        fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        builder = segmentNodeStore.getRoot().builder();
        builder.setProperty("foo2", "bar2");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.close();
        // remove the segment 0000 from the second archive
        ListBlobsOptions listBlobsOptions = new ListBlobsOptions();
        listBlobsOptions.setPrefix("oak/data00001a.tar/0000.");
        BlobItem blobItem = readBlobContainerClient.listBlobs(listBlobsOptions, null).iterator().next();
        readBlobContainerClient.getBlobClient(blobItem.getName()).delete();
        readBlobContainerClient.getBlobClient("oak/data00001a.tar/closed").delete();
        fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        assertEquals("bar", segmentNodeStore.getRoot().getString("foo"));
        fs.close();
    }
    /**
     * Deletes a middle segment (0002) of the latest archive before recovery and
     * verifies: the recovered archive drops 0002 and everything after it, the
     * backup directory keeps what existed at recovery time, and only content
     * from surviving segments remains readable.
     */
    @Test
    public void testUncleanStopSegmentMissing() throws IOException, InvalidFileStoreVersionException, CommitFailedException, BlobStorageException {
        AzurePersistence p = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        FileStore fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        NodeBuilder builder = segmentNodeStore.getRoot().builder();
        builder.setProperty("foo", "bar");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.close();
        // make sure there are 2 archives
        fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        builder = segmentNodeStore.getRoot().builder();
        builder.setProperty("foo0", "bar0");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.flush();
        //create segment 0001
        builder.setProperty("foo1", "bar1");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.flush();
        //create segment 0002
        builder.setProperty("foo2", "bar2");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.flush();
        //create segment 0003
        builder.setProperty("foo3", "bar3");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        fs.flush();
        fs.close();
        // remove the segment 0002 from the second archive
        ListBlobsOptions listOptions = new ListBlobsOptions();
        listOptions.setPrefix("oak/data00001a.tar/0002.");
        BlobItem blobItem = readBlobContainerClient.listBlobs(listOptions, null).stream().iterator().next();
        readBlobContainerClient.getBlobClient(blobItem.getName()).getBlockBlobClient().delete();
        readBlobContainerClient.getBlobClient("oak/data00001a.tar/closed").delete();
        fs = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(p).build();
        segmentNodeStore = SegmentNodeStoreBuilders.builder(fs).build();
        assertEquals("bar", segmentNodeStore.getRoot().getString("foo"));
        //recovered archive data00001a.tar should not contain segments 0002 and 0003
        listOptions.setPrefix("oak/data00001a.tar/0002.");
        assertFalse(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        listOptions.setPrefix("oak/data00001a.tar/0003.");
        assertFalse(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        listOptions.setPrefix("oak/data00001a.tar.bak");
        assertTrue("Backup directory should have been created", readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        //backup has all segments but 0002 since it was deleted before recovery
        listOptions.setPrefix("oak/data00001a.tar.bak/0001.");
        assertTrue(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        listOptions.setPrefix("oak/data00001a.tar.bak/0002.");
        assertFalse(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        listOptions.setPrefix("oak/data00001a.tar.bak/0003.");
        assertTrue(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        //verify content from recovered segments preserved
        assertEquals("bar1", segmentNodeStore.getRoot().getString("foo1"));
        //content from deleted segments not preserved
        assertNull(segmentNodeStore.getRoot().getString("foo2"));
        assertNull(segmentNodeStore.getRoot().getString("foo3"));
        fs.close();
    }
    /** An archive exists after flush+close; a never-created one does not. */
    @Test
    public void testExists() throws IOException {
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        SegmentArchiveWriter writer = manager.create("data00000a.tar");
        List<UUID> uuids = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            UUID u = UUID.randomUUID();
            writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
            uuids.add(u);
        }
        writer.flush();
        writer.close();
        Assert.assertTrue(manager.exists("data00000a.tar"));
        Assert.assertFalse(manager.exists("data00001a.tar"));
    }
    /** The archive becomes visible only after the first flush, not on create(). */
    @Test
    public void testArchiveExistsAfterFlush() throws IOException {
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        SegmentArchiveWriter writer = manager.create("data00000a.tar");
        Assert.assertFalse(manager.exists("data00000a.tar"));
        UUID u = UUID.randomUUID();
        writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
        writer.flush();
        Assert.assertTrue(manager.exists("data00000a.tar"));
    }
    /**
     * Reading a segment whose blob was deleted after the reader was opened must
     * raise FileNotFoundException (the expected exception of this test), not
     * RepositoryNotReachableException.
     */
    @Test(expected = FileNotFoundException.class)
    public void testSegmentDeletedAfterCreatingReader() throws IOException, BlobStorageException {
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        SegmentArchiveWriter writer = manager.create("data00000a.tar");
        Assert.assertFalse(manager.exists("data00000a.tar"));
        UUID u = UUID.randomUUID();
        writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
        writer.flush();
        writer.close();
        SegmentArchiveReader reader = manager.open("data00000a.tar");
        Buffer segment = reader.readSegment(u.getMostSignificantBits(), u.getLeastSignificantBits());
        assertNotNull(segment);
        ListBlobsOptions listOptions = new ListBlobsOptions();
        listOptions.setPrefix("oak/data00000a.tar/0000.");
        BlobItem segment0000 = readBlobContainerClient.listBlobs(listOptions, null).iterator().next();
        readBlobContainerClient.getBlobClient(segment0000.getName()).delete();
        try {
            // FileNotFoundException should be thrown here
            reader.readSegment(u.getMostSignificantBits(), u.getLeastSignificantBits());
            fail();
        } catch (RepositoryNotReachableException e) {
            fail();
        }
    }
    /**
     * A missing segment blob surfaces to FileStore callers as
     * SegmentNotFoundException (the expected exception of this test).
     */
    @Test(expected = SegmentNotFoundException.class)
    public void testMissingSegmentDetectedInFileStore() throws IOException, BlobStorageException, InvalidFileStoreVersionException {
        AzurePersistence azurePersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        FileStore fileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(azurePersistence).build();
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        SegmentArchiveWriter writer = manager.create("data00000a.tar");
        //Assert.assertFalse(manager.exists("data00000a.tar"));
        UUID u = UUID.randomUUID();
        writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
        writer.flush();
        writer.close();
        SegmentArchiveReader reader = manager.open("data00000a.tar");
        Buffer segment = reader.readSegment(u.getMostSignificantBits(), u.getLeastSignificantBits());
        assertNotNull(segment);
        ListBlobsOptions listOptions = new ListBlobsOptions();
        listOptions.setPrefix("oak/data00000a.tar/0000.");
        BlobItem segment0000 = readBlobContainerClient.listBlobs(listOptions, null).iterator().next();
        readBlobContainerClient.getBlobClient(segment0000.getName()).delete();
        // SegmentNotFoundException should be thrown here
        fileStore.readSegment(new SegmentId(fileStore, u.getMostSignificantBits(), u.getLeastSignificantBits()));
    }
    /**
     * Opening a read-only store while a read-write store holds the repository
     * must not trigger the tar-recovery procedure (no *.ro.bak directory).
     */
    @Test
    public void testReadOnlyRecovery() throws InvalidFileStoreVersionException, IOException, CommitFailedException, BlobStorageException {
        AzurePersistence rwPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        FileStore rwFileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(rwPersistence).build();
        SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(rwFileStore).build();
        NodeBuilder builder = segmentNodeStore.getRoot().builder();
        builder.setProperty("foo", "bar");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        rwFileStore.flush();
        ListBlobsOptions listOptions = new ListBlobsOptions();
        listOptions.setPrefix("oak/data00000a.tar");
        assertTrue(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        listOptions.setPrefix("oak/data00000a.tar.ro.bak");
        assertFalse(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        // create read-only FS
        AzurePersistence roPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        ReadOnlyFileStore roFileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(roPersistence).buildReadOnly();
        PropertyState fooProperty = SegmentNodeStoreBuilders.builder(roFileStore).build()
                .getRoot()
                .getProperty("foo");
        assertThat(fooProperty, not(nullValue()));
        assertThat(fooProperty.getValue(Type.STRING), equalTo("bar"));
        roFileStore.close();
        rwFileStore.close();
        listOptions.setPrefix("oak/data00000a.tar");
        assertTrue(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        // after creating a read-only FS, the recovery procedure should not be started since there is another running Oak process
        listOptions.setPrefix("oak/data00000a.tar.ro.bak");
        assertFalse(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
    }
    /**
     * A split persistence (caching Azure persistence + local tar) opened beside
     * a live read-write store must build without triggering tar recovery.
     */
    @Test
    public void testCachingPersistenceTarRecovery() throws InvalidFileStoreVersionException, IOException, CommitFailedException, BlobStorageException {
        AzurePersistence rwPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        FileStore rwFileStore = FileStoreBuilder.fileStoreBuilder(folder.newFolder()).withCustomPersistence(rwPersistence).build();
        SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(rwFileStore).build();
        NodeBuilder builder = segmentNodeStore.getRoot().builder();
        builder.setProperty("foo", "bar");
        segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        rwFileStore.flush();
        ListBlobsOptions listOptions = new ListBlobsOptions();
        listOptions.setPrefix("oak/data00000a.tar");
        assertTrue(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        listOptions.setPrefix("oak/data00000a.tar.ro.bak");
        assertFalse(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        // create files store with split persistence
        AzurePersistence azureSharedPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        CachingPersistence cachingPersistence = new CachingPersistence(createPersistenceCache(), azureSharedPersistence);
        File localFolder = folder.newFolder();
        SegmentNodeStorePersistence localPersistence = new TarPersistence(localFolder);
        SegmentNodeStorePersistence splitPersistence = new SplitPersistence(cachingPersistence, localPersistence);
        // exception should not be thrown here
        // NOTE: the store is only constructed, never used - building it is the assertion.
        FileStore splitPersistenceFileStore = FileStoreBuilder.fileStoreBuilder(localFolder).withCustomPersistence(splitPersistence).build();
        listOptions.setPrefix("oak/data00000a.tar");
        assertTrue(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
        // after creating a read-only FS, the recovery procedure should not be started since there is another running Oak process
        listOptions.setPrefix("oak/data00000a.tar.ro.bak");
        assertFalse(readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext());
    }
    /**
     * collectBlobReferences on a read-only store must not fail when the binary
     * references (.brf) file has not been written yet.
     */
    @Test
    public void testCollectBlobReferencesForReadOnlyFileStore() throws InvalidFileStoreVersionException, IOException, CommitFailedException, BlobStorageException {
        AzurePersistence rwPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        try (FileStore rwFileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(rwPersistence).build()) {
            SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(rwFileStore).build();
            NodeBuilder builder = segmentNodeStore.getRoot().builder();
            builder.setProperty("foo", "bar");
            segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
            rwFileStore.flush();
            // file with binary references is not created yet
            assertFalse("brf file should not be present", readBlobContainerClient.getBlobClient("oak/data00000a.tar/data00000a.tar.brf").exists());
            // create read-only FS, while the rw FS is still open
            AzurePersistence roPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
            try (ReadOnlyFileStore roFileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(roPersistence).buildReadOnly()) {
                PropertyState fooProperty = SegmentNodeStoreBuilders.builder(roFileStore).build()
                        .getRoot()
                        .getProperty("foo");
                assertThat(fooProperty, not(nullValue()));
                assertThat(fooProperty.getValue(Type.STRING), equalTo("bar"));
                assertDoesNotThrow(() -> roFileStore.collectBlobReferences(s -> {
                }));
            }
        }
    }
    /**
     * Same scenario as above, additionally asserting that no references are
     * reported when the .brf file is absent.
     */
    @Test
    public void testCollectBlobReferencesDoesNotFailWhenFileIsMissing() throws InvalidFileStoreVersionException, IOException, CommitFailedException, BlobStorageException {
        AzurePersistence rwPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
        try (FileStore rwFileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(rwPersistence).build()) {
            SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(rwFileStore).build();
            NodeBuilder builder = segmentNodeStore.getRoot().builder();
            builder.setProperty("foo", "bar");
            segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
            rwFileStore.flush();
            // file with binary references is not created yet
            assertFalse("brf file should not be present", readBlobContainerClient.getBlobClient("oak/data00000a.tar/data00000a.tar.brf").exists());
            // create read-only FS, while the rw FS is still open
            AzurePersistence roPersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");
            try (ReadOnlyFileStore roFileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(roPersistence).buildReadOnly()) {
                PropertyState fooProperty = SegmentNodeStoreBuilders.builder(roFileStore).build()
                        .getRoot()
                        .getProperty("foo");
                assertThat(fooProperty, not(nullValue()));
                assertThat(fooProperty.getValue(Type.STRING), equalTo("bar"));
                HashSet<String> references = new HashSet<>();
                assertDoesNotThrow(() ->
                        roFileStore.collectBlobReferences(references::add));
                assertTrue("No references should have been collected since reference file has not been created", references.isEmpty());
            }
        }
    }
    /**
     * Simulates losing the repository lock (lease renewal fails with a timeout)
     * and verifies that pending writes are blocked rather than persisted, so a
     * second read-write store can safely take over. Relies on the shortened
     * lease timings from {@code systemPropertyRule}.
     */
    @Test
    public void testWriteAfterLosingRepoLock() throws Exception {
        BlobContainerClient oakDirectory = readBlobContainerClient.getBlobClient("oak").getContainerClient();
        BlobContainerClient writeOakDirectory = writeBlobContainerClient.getBlobClient("oak").getContainerClient();
        BlobContainerClient noRetryOakDirectory = noRetryBlobContainerClient.getBlobClient("oak").getContainerClient();
        AzurePersistence rwPersistence = new AzurePersistence(oakDirectory, writeOakDirectory, noRetryOakDirectory, "");
        BlockBlobClient blob = readBlobContainerClient.getBlobClient("oak/repo.lock").getBlockBlobClient();
        BlobLeaseClient leaseClient = new BlobLeaseClientBuilder().blobClient(blob).buildClient();
        BlockBlobClient blobMocked = Mockito.spy(blob);
        BlobLeaseClient blobLeaseMocked = Mockito.spy(leaseClient);
        Mockito
                .doCallRealMethod()
                .when(blobLeaseMocked).renewLease();
        AzurePersistence mockedRwPersistence = Mockito.spy(rwPersistence);
        AzureRepositoryLock azureRepositoryLock = new AzureRepositoryLock(blobMocked, blobLeaseMocked, () -> {
        }, writeAccessController);
        AzureArchiveManager azureArchiveManager = new AzureArchiveManager(oakDirectory, writeOakDirectory, "", new IOMonitorAdapter(), new FileStoreMonitorAdapter(), writeAccessController);
        Mockito
                .doAnswer(invocation -> azureRepositoryLock.lock())
                .when(mockedRwPersistence).lockRepository();
        Mockito
                .doReturn(azureArchiveManager)
                .when(mockedRwPersistence).createArchiveManager(Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any());
        Mockito
                .doReturn(new AzureJournalFile(oakDirectory, writeOakDirectory, "journal.log", writeAccessController))
                .when(mockedRwPersistence).getJournalFile();
        FileStore rwFileStore = FileStoreBuilder.fileStoreBuilder(folder.newFolder()).withCustomPersistence(mockedRwPersistence).build();
        SegmentNodeStore segmentNodeStore = SegmentNodeStoreBuilders.builder(rwFileStore).build();
        NodeBuilder builder = segmentNodeStore.getRoot().builder();
        // simulate operation timeout when trying to renew lease
        Mockito.reset(blobMocked);
        BlobStorageException storageException =
                //new BlobStorageException("operation timeout", BlobErrorCode.OPERATION_TIMED_OUT, new TimeoutException());
                new BlobStorageException("operation timeout", null, new TimeoutException());
        Mockito.doThrow(storageException).when(blobLeaseMocked).renewLease();
        // wait till lease expires
        Thread.sleep(17000);
        // try updating repository
        Thread thread = new Thread(() -> {
            try {
                builder.setProperty("foo", "bar");
                segmentNodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
                rwFileStore.flush();
            } catch (Exception e) {
                fail("No Exception expected, but got: " + e.getMessage());
            }
        });
        thread.start();
        Thread.sleep(2000);
        // It should be possible to start another RW file store.
        FileStore rwFileStore2 = FileStoreBuilder.fileStoreBuilder(folder.newFolder()).withCustomPersistence(new AzurePersistence(oakDirectory, writeOakDirectory, noRetryOakDirectory, "")).build();
        SegmentNodeStore segmentNodeStore2 = SegmentNodeStoreBuilders.builder(rwFileStore2).build();
        NodeBuilder builder2 = segmentNodeStore2.getRoot().builder();
        //repository hasn't been updated
        assertNull(builder2.getProperty("foo"));
        rwFileStore2.close();
    }
    @Test
    public void testListArchivesDoesNotReturnDeletedArchive() throws IOException, BlobStorageException {
        // The archive manager should not return the archive which has "deleted" marker
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        // Create an archive
        createArchive(manager, "data00000a.tar");
        // Verify the archive is listed
        List<String> archives = manager.listArchives();
        assertTrue("Archive should be listed before deletion", archives.contains("data00000a.tar"));
        // Upload deleted marker for the archive
        writeBlobContainerClient.getBlobClient("oak/data00000a.tar/deleted").getBlockBlobClient().upload(BinaryData.fromBytes(new byte[0]));
        // Verify the archive is no longer listed after adding deleted marker
        archives = manager.listArchives();
        assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar"));
    }
    /**
     * With write access enabled, listing archives also physically removes an
     * archive carrying the "deleted" marker.
     */
    @Test
    public void testListArchiveWithDeleteMarkerPresentWithWriteAccess() throws Exception{
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        createArchive(manager, "data00000a.tar");
        writeBlobContainerClient.getBlobClient("oak/data00000a.tar/deleted").getBlockBlobClient().upload(BinaryData.fromBytes(new byte[0]));
        List<String> archives = manager.listArchives();
        assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar"));
        assertFalse("Archive should be deleted", readBlobContainerClient.listBlobs(new ListBlobsOptions().setPrefix("oak/data00000a.tar"), null).iterator().hasNext());
    }
    /**
     * With write access disabled, a marker-deleted archive is hidden from the
     * listing but its blobs must remain untouched.
     */
    @Test
    public void testListArchiveWithDeleteMarkerPresentAndNoWriteAccess() throws Exception{
        SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
        createArchive(manager, "data00000a.tar");
        writeBlobContainerClient.getBlobClient("oak/data00000a.tar/deleted").getBlockBlobClient().upload(BinaryData.fromBytes(new byte[0]));
        // disable writing
        writeAccessController.disableWriting();
        List<String> archives = manager.listArchives();
        assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar"));
        assertTrue("Archive should not be deleted", readBlobContainerClient.listBlobs(new ListBlobsOptions().setPrefix("oak/data00000a.tar"), null).iterator().hasNext());
    }
    // Helper: creates a one-segment archive and flushes/closes it.
    private void createArchive(SegmentArchiveManager manager, String archiveName) throws IOException {
        SegmentArchiveWriter writer = manager.create(archiveName);
        UUID u = UUID.randomUUID();
        writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
        writer.flush();
        writer.close();
    }
    // Helper: a no-op PersistentCache (never hits, never stores) for the
    // CachingPersistence used in testCachingPersistenceTarRecovery.
    private PersistentCache createPersistenceCache() {
        return new AbstractPersistentCache() {
            @Override
            protected Buffer readSegmentInternal(long msb, long lsb) {
                return null;
            }
            @Override
            public boolean containsSegment(long msb, long lsb) {
                return false;
            }
            @Override
            public void writeSegment(long msb, long lsb, Buffer buffer) {
            }
            @Override
            public void cleanUp() {
            }
        };
    }
    // Minimal stand-in for JUnit 5's Assertions.assertDoesNotThrow (this module
    // runs on JUnit 4).
    private static void assertDoesNotThrow(Executable executable) {
        try {
            executable.execute();
        } catch (Exception e) {
            fail("No Exception expected, but got: " + e.getMessage());
        }
    }
    // Throwing runnable used by assertDoesNotThrow.
    interface Executable {
        void execute() throws Exception;
    }
}
|
apache/lucene | 35,622 | lucene/core/src/java/org/apache/lucene/codecs/lucene103/blocktree/SegmentTermsEnum.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.codecs.lucene103.blocktree;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Arrays;
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.index.BaseTermsEnum;
import org.apache.lucene.index.ImpactsEnum;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermState;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOBooleanSupplier;
import org.apache.lucene.util.RamUsageEstimator;
/** Iterates through terms in this field. */
final class SegmentTermsEnum extends BaseTermsEnum {

  // Lazy init (cloned from the parent's terms input on first use; see initIndexInput):
  IndexInput in;

  // One frame per block we have descended into; grown on demand in getFrame.
  private SegmentTermsEnumFrame[] stack = new SegmentTermsEnumFrame[0];
  private final SegmentTermsEnumFrame staticFrame;
  SegmentTermsEnumFrame currentFrame;
  boolean termExists;

  final FieldReader fr;

  // Frame ord beyond which a reused frame must be rewound during a seek;
  // set from currentFrame.ord at the start of prepareSeekExact/seekCeil.
  private int targetBeforeCurrentLength;

  // static boolean DEBUG = BlockTreeTermsWriter.DEBUG;

  // What prefix of the current term was present in the index; when we only next() through the
  // index, this stays at 0. It's only set when
  // we seekCeil/Exact:
  private int validIndexPrefix;

  // assert only:
  private boolean eof;

  final BytesRefBuilder term = new BytesRefBuilder();
  private final TrieReader trieReader;

  // Trie nodes along the current seek path; nodes[1 + i] is reached after consuming i+1 label
  // bytes, nodes[0] is the root. Grown on demand in getNode.
  private TrieReader.Node[] nodes = new TrieReader.Node[1];
/**
 * Creates a terms enum over {@code fr}'s terms dictionary, initially positioned on the static
 * (un-seek'd) frame; the terms file itself is opened lazily.
 */
public SegmentTermsEnum(FieldReader fr, TrieReader reader) throws IOException {
  this.fr = fr;

  // Used to hold seek by TermState, or cached seek
  staticFrame = new SegmentTermsEnumFrame(this, -1);
  trieReader = reader;
  currentFrame = staticFrame;
  nodes[0] = trieReader.root;

  // currentFrame = pushFrame(arc, rootCode, 0);
  // currentFrame.loadBlock();
  validIndexPrefix = 0;
  // if (DEBUG) {
  // System.out.println("init frame state " + currentFrame.ord);
  // printSeekState();
  // }

  // System.out.println();
  // computeBlockStats().print(System.out);
}
// Not private to avoid synthetic access$NNN methods.
// Lazily clones the shared terms IndexInput the first time it is needed.
void initIndexInput() {
  if (in != null) {
    return; // already initialized
  }
  in = fr.parent.termsIn.clone();
}
/** Runs next() through the entire terms dict, computing aggregate statistics. */
public Stats computeBlockStats() throws IOException {

  Stats stats = new Stats(fr.parent.segment, fr.fieldInfo.name);
  currentFrame = staticFrame;
  TrieReader.Node node = nodes[0] = trieReader.root;

  // Empty string prefix must have an output in the
  // index!
  currentFrame = pushFrame(node, 0);
  currentFrame.fpOrig = currentFrame.fp;
  currentFrame.loadBlock();
  validIndexPrefix = 0;

  stats.startBlock(currentFrame, !currentFrame.isLastInFloor);

  allTerms:
  while (true) {

    // Pop finished blocks
    while (currentFrame.nextEnt == currentFrame.entCount) {
      stats.endBlock(currentFrame);
      if (!currentFrame.isLastInFloor) {
        // Advance to next floor block
        currentFrame.loadNextFloorBlock();
        stats.startBlock(currentFrame, true);
        break;
      } else {
        if (currentFrame.ord == 0) {
          // Popped past the root frame: iteration is complete.
          break allTerms;
        }
        final long lastFP = currentFrame.fpOrig;
        currentFrame = stack[currentFrame.ord - 1];
        assert lastFP == currentFrame.lastSubFP;
        // if (DEBUG) {
        // System.out.println("  reset validIndexPrefix=" + validIndexPrefix);
        // }
      }
    }

    while (true) {
      if (currentFrame.next()) {
        // Push to new block:
        currentFrame = pushFrame(null, currentFrame.lastSubFP, term.length());
        currentFrame.fpOrig = currentFrame.fp;
        // This is a "next" frame -- even if it's
        // floor'd we must pretend it isn't so we don't
        // try to scan to the right floor frame:
        currentFrame.loadBlock();
        stats.startBlock(currentFrame, !currentFrame.isLastInFloor);
      } else {
        stats.term(term.get());
        break;
      }
    }
  }

  stats.finish();

  // Put root frame back:
  currentFrame = staticFrame;
  node = nodes[0] = trieReader.root;
  // Empty string prefix must have an output in the index!
  assert node.hasOutput();
  currentFrame = pushFrame(node, 0);
  currentFrame.rewind();
  currentFrame.loadBlock();
  validIndexPrefix = 0;
  term.clear();

  return stats;
}
private SegmentTermsEnumFrame getFrame(int ord) throws IOException {
if (ord >= stack.length) {
final SegmentTermsEnumFrame[] next =
new SegmentTermsEnumFrame
[ArrayUtil.oversize(1 + ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
System.arraycopy(stack, 0, next, 0, stack.length);
for (int stackOrd = stack.length; stackOrd < next.length; stackOrd++) {
next[stackOrd] = new SegmentTermsEnumFrame(this, stackOrd);
}
stack = next;
}
assert stack[ord].ord == ord;
return stack[ord];
}
/**
 * Returns the reusable trie node at {@code ord}, growing the node array lazily (with
 * over-allocation) and pre-filling new slots with fresh nodes.
 */
private TrieReader.Node getNode(int ord) {
  if (ord >= nodes.length) {
    final int newSize = ArrayUtil.oversize(1 + ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
    final TrieReader.Node[] grown = Arrays.copyOf(nodes, newSize);
    for (int i = nodes.length; i < grown.length; i++) {
      grown[i] = new TrieReader.Node();
    }
    nodes = grown;
  }
  return nodes[ord];
}
// Pushes a frame we seek'd to; the node carries hasTerms/isFloor/output-fp metadata that is
// copied onto the frame before delegating to the fp-based pushFrame.
SegmentTermsEnumFrame pushFrame(TrieReader.Node node, int length) throws IOException {
  final SegmentTermsEnumFrame f = getFrame(1 + currentFrame.ord);
  f.hasTerms = node.hasTerms;
  f.hasTermsOrig = f.hasTerms;
  f.isFloor = node.isFloor();
  if (f.isFloor) {
    f.setFloorData(node.floorData(trieReader));
  }
  pushFrame(node, node.outputFp, length);
  return f;
}
// Pushes next'd frame or seek'd frame; we later
// lazy-load the frame only when needed
SegmentTermsEnumFrame pushFrame(TrieReader.Node node, long fp, int length) throws IOException {
  final SegmentTermsEnumFrame f = getFrame(1 + currentFrame.ord);
  f.node = node;
  if (f.fpOrig == fp && f.nextEnt != -1) {
    // Frame is being re-pushed onto the same block it already holds; reuse its state.
    // if (DEBUG) System.out.println("      push reused frame ord=" + f.ord + " fp=" + f.fp +
    // " isFloor?=" + f.isFloor + " hasTerms=" + f.hasTerms + " pref=" + term + " nextEnt=" +
    // f.nextEnt + " targetBeforeCurrentLength=" + targetBeforeCurrentLength + " term.length=" +
    // term.length + " vs prefix=" + f.prefix);
    // if (f.prefix > targetBeforeCurrentLength) {
    if (f.ord > targetBeforeCurrentLength) {
      // Seek target diverged before this frame's depth: rescan from the block start.
      f.rewind();
    } else {
      // if (DEBUG) {
      // System.out.println("        skip rewind!");
      // }
    }
    assert length == f.prefixLength;
  } else {
    // Fresh block: reset the frame's scan state; the block itself is loaded lazily.
    f.nextEnt = -1;
    f.prefixLength = length;
    f.state.termBlockOrd = 0;
    f.fpOrig = f.fp = fp;
    f.lastSubFP = -1;
    // if (DEBUG) {
    // final int sav = term.length;
    // term.length = length;
    // System.out.println("      push new frame ord=" + f.ord + " fp=" + f.fp + " hasTerms=" +
    // f.hasTerms + " isFloor=" + f.isFloor + " pref=" + ToStringUtils.bytesRefToString(term));
    // term.length = sav;
    // }
  }

  return f;
}
// asserts only: clears the EOF flag; always returns true so it can be invoked from an assert.
private boolean clearEOF() {
  eof = false;
  return true;
}
// asserts only: sets the EOF flag; always returns true so it can be invoked from an assert.
private boolean setEOF() {
  eof = true;
  return true;
}
/**
 * Prepares an exact seek to {@code target}, reusing as much of the previous seek state (shared
 * prefix of frames/trie nodes) as possible. Returns {@code null} when the term provably does not
 * exist (outside the field's min/max range, or the target block has no terms); otherwise returns
 * a supplier that loads and scans the block, reporting whether the term exists. When
 * {@code prefetch} is true, the target block is prefetched before returning.
 */
private IOBooleanSupplier prepareSeekExact(BytesRef target, boolean prefetch) throws IOException {
  if (fr.size() > 0 && (target.compareTo(fr.getMin()) < 0 || target.compareTo(fr.getMax()) > 0)) {
    return null;
  }

  term.grow(1 + target.length);

  assert clearEOF();

  // if (DEBUG) {
  // System.out.println("\nBTTR.seekExact seg=" + fr.parent.segment + " target=" +
  // fr.fieldInfo.name + ":" + ToStringUtils.bytesRefToString(target) + " current=" +
  // ToStringUtils.bytesRefToString(term) +
  // " (exists?=" + termExists + ") validIndexPrefix=" + validIndexPrefix);
  // printSeekState(System.out);
  // }

  TrieReader.Node node;
  int targetUpto;

  targetBeforeCurrentLength = currentFrame.ord;

  if (currentFrame != staticFrame) {

    // We are already seek'd; find the common
    // prefix of new seek term vs current term and
    // re-use the corresponding seek state.  For
    // example, if app first seeks to foobar, then
    // seeks to foobaz, we can re-use the seek state
    // for the first 5 bytes.

    // if (DEBUG) {
    // System.out.println("  re-use current seek state validIndexPrefix=" + validIndexPrefix);
    // }

    node = nodes[0];
    assert node.hasOutput();
    targetUpto = 0;

    SegmentTermsEnumFrame lastFrame = stack[0];
    assert validIndexPrefix <= term.length();

    final int targetLimit = Math.min(target.length, validIndexPrefix);

    int cmp = 0;

    // First compare up to valid seek frames:
    while (targetUpto < targetLimit) {
      cmp = (term.byteAt(targetUpto) & 0xFF) - (target.bytes[target.offset + targetUpto] & 0xFF);
      // if (DEBUG) {
      // System.out.println("    cycle targetUpto=" + targetUpto + " (vs limit=" + targetLimit
      // + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) +
      // " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")" + " node.output=" +
      // node.output
      // + " output=" + output);
      // }
      if (cmp != 0) {
        break;
      }
      node = nodes[1 + targetUpto];
      assert node.label == (target.bytes[target.offset + targetUpto] & 0xFF)
          : "node.label="
              + (char) node.label
              + " targetLabel="
              + (char) (target.bytes[target.offset + targetUpto] & 0xFF);
      if (node.hasOutput()) {
        // This node has a block output: the next deeper frame becomes the last valid one.
        lastFrame = stack[1 + lastFrame.ord];
      }
      targetUpto++;
    }

    if (cmp == 0) {
      // Second compare the rest of the term, but
      // don't save node/output/frame; we only do this
      // to find out if the target term is before,
      // equal or after the current term
      cmp =
          Arrays.compareUnsigned(
              term.bytes(),
              targetUpto,
              term.length(),
              target.bytes,
              target.offset + targetUpto,
              target.offset + target.length);
    }

    if (cmp < 0) {
      // Common case: target term is after current
      // term, ie, app is seeking multiple terms
      // in sorted order
      // if (DEBUG) {
      // System.out.println("  target is after current (shares prefixLen=" + targetUpto + ");
      // frame.ord=" + lastFrame.ord);
      // }
      currentFrame = lastFrame;

    } else if (cmp > 0) {
      // Uncommon case: target term
      // is before current term; this means we can
      // keep the currentFrame but we must rewind it
      // (so we scan from the start)
      targetBeforeCurrentLength = lastFrame.ord;
      // if (DEBUG) {
      // System.out.println("  target is before current (shares prefixLen=" + targetUpto + ");
      // rewind frame ord=" + lastFrame.ord);
      // }
      currentFrame = lastFrame;
      currentFrame.rewind();
    } else {
      // Target is exactly the same as current term
      assert term.length() == target.length;
      if (termExists) {
        // if (DEBUG) {
        // System.out.println("  target is same as current; return true");
        // }
        return () -> true;
      } else {
        // if (DEBUG) {
        // System.out.println("  target is same as current but term doesn't exist");
        // }
      }
      // validIndexPrefix = currentFrame.depth;
      // term.length = target.length;
      // return termExists;
    }

  } else {
    // No prior seek state: start from the trie root.
    targetBeforeCurrentLength = -1;
    node = trieReader.root;

    // Empty string prefix must have an output (block) in the index!
    assert node.hasOutput();

    // if (DEBUG) {
    // System.out.println("    no seek state; push root frame");
    // }

    currentFrame = staticFrame;

    // term.length = 0;
    targetUpto = 0;
    currentFrame = pushFrame(node, 0);
  }

  // if (DEBUG) {
  // System.out.println("  start index loop targetUpto=" + targetUpto + " output=" + output +
  // " currentFrame.ord=" + currentFrame.ord + " targetBeforeCurrentLength=" +
  // targetBeforeCurrentLength);
  // }

  // We are done sharing the common prefix with the incoming target and where we are currently
  // seek'd; now continue walking the index:
  while (targetUpto < target.length) {

    final int targetLabel = target.bytes[target.offset + targetUpto] & 0xFF;

    final TrieReader.Node nextNode =
        trieReader.lookupChild(targetLabel, node, getNode(1 + targetUpto));

    if (nextNode == null) {

      // Index is exhausted
      // if (DEBUG) {
      // System.out.println("    index: index exhausted label=" + ((char) targetLabel) + " " +
      // toHex(targetLabel));
      // }

      validIndexPrefix = currentFrame.prefixLength;
      // validIndexPrefix = targetUpto;

      currentFrame.scanToFloorFrame(target);

      if (!currentFrame.hasTerms) {
        // The deepest block reached has no terms, so the target cannot exist.
        termExists = false;
        term.setByteAt(targetUpto, (byte) targetLabel);
        term.setLength(1 + targetUpto);
        // if (DEBUG) {
        // System.out.println("  FAST NOT_FOUND term=" + ToStringUtils.bytesRefToString(term));
        // }
        return null;
      }

      if (prefetch) {
        currentFrame.prefetchBlock();
      }

      return () -> {
        currentFrame.loadBlock();

        final SeekStatus result = currentFrame.scanToTerm(target, true);
        if (result == SeekStatus.FOUND) {
          // if (DEBUG) {
          // System.out.println("  return FOUND term=" + term.utf8ToString() + " " + term);
          // }
          return true;
        } else {
          // if (DEBUG) {
          // System.out.println("  got " + result + "; return NOT_FOUND term=" +
          // ToStringUtils.bytesRefToString(term));
          // }
          return false;
        }
      };
    } else {
      // Follow this node
      node = nextNode;
      term.setByteAt(targetUpto, (byte) targetLabel);
      // Aggregate output as we go:
      // if (DEBUG) {
      // System.out.println("    index: follow label=" + toHex(target.bytes[target.offset +
      // targetUpto]&0xff) + " node.output=" + node.output + " node.nfo=" + node.nextFinalOutput);
      // }
      targetUpto++;

      if (node.hasOutput()) {
        // if (DEBUG) System.out.println("    node is final!");
        currentFrame = pushFrame(node, targetUpto);
        // if (DEBUG) System.out.println("    curFrame.ord=" + currentFrame.ord + " hasTerms=" +
        // currentFrame.hasTerms);
      }
    }
  }

  // validIndexPrefix = targetUpto;
  validIndexPrefix = currentFrame.prefixLength;

  currentFrame.scanToFloorFrame(target);

  // Target term is entirely contained in the index:
  if (!currentFrame.hasTerms) {
    termExists = false;
    term.setLength(targetUpto);
    // if (DEBUG) {
    // System.out.println("  FAST NOT_FOUND term=" + ToStringUtils.bytesRefToString(term));
    // }
    return null;
  }

  if (prefetch) {
    currentFrame.prefetchBlock();
  }

  return () -> {
    currentFrame.loadBlock();

    final SeekStatus result = currentFrame.scanToTerm(target, true);
    if (result == SeekStatus.FOUND) {
      // if (DEBUG) {
      // System.out.println("  return FOUND term=" + term.utf8ToString() + " " + term);
      // }
      return true;
    } else {
      // if (DEBUG) {
      // System.out.println("  got result " + result + "; return NOT_FOUND term=" +
      // term.utf8ToString());
      // }
      return false;
    }
  };
}
@Override
public IOBooleanSupplier prepareSeekExact(BytesRef target) throws IOException {
  // Public variant always prefetches: the caller is expected to invoke get() later.
  return prepareSeekExact(target, true);
}
/**
 * Seeks exactly to {@code target}, returning whether the term exists. No prefetch is done since
 * the block is loaded immediately through the supplier.
 */
@Override
public boolean seekExact(BytesRef target) throws IOException {
  final IOBooleanSupplier termExistsSupplier = prepareSeekExact(target, false);
  if (termExistsSupplier == null) {
    return false;
  }
  return termExistsSupplier.get();
}
/**
 * Seeks to the smallest term that is {@code >= target}, reusing the previous seek state's common
 * prefix like {@code prepareSeekExact}, then scanning the final block. Returns FOUND, NOT_FOUND
 * (positioned on the ceiling term), or END when no term is {@code >= target}.
 */
@Override
public SeekStatus seekCeil(BytesRef target) throws IOException {

  term.grow(1 + target.length);

  assert clearEOF();

  // if (DEBUG) {
  // System.out.println("\nBTTR.seekCeil seg=" + fr.parent.segment + " target=" +
  // fr.fieldInfo.name + ":" + ToStringUtils.bytesRefToString(target) + " current=" +
  // ToStringUtils.bytesRefToString(term) + " (exists?=" + termExists +
  // ") validIndexPrefix=  " + validIndexPrefix);
  // printSeekState(System.out);
  // }

  TrieReader.Node node;
  int targetUpto;

  targetBeforeCurrentLength = currentFrame.ord;

  if (currentFrame != staticFrame) {

    // We are already seek'd; find the common
    // prefix of new seek term vs current term and
    // re-use the corresponding seek state.  For
    // example, if app first seeks to foobar, then
    // seeks to foobaz, we can re-use the seek state
    // for the first 5 bytes.

    // if (DEBUG) {
    // System.out.println("  re-use current seek state validIndexPrefix=" + validIndexPrefix);
    // }

    node = nodes[0];
    assert node.hasOutput();
    targetUpto = 0;

    SegmentTermsEnumFrame lastFrame = stack[0];
    assert validIndexPrefix <= term.length();

    final int targetLimit = Math.min(target.length, validIndexPrefix);

    int cmp = 0;

    // First compare up to valid seek frames:
    while (targetUpto < targetLimit) {
      cmp = (term.byteAt(targetUpto) & 0xFF) - (target.bytes[target.offset + targetUpto] & 0xFF);
      // if (DEBUG) {
      // System.out.println("    cycle targetUpto=" + targetUpto + " (vs limit=" + targetLimit +
      // ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) +
      // " vs termLabel=" + (char) (term.byteAt(targetUpto)) + ")" + " node.output=" +
      // node.output
      // + " output=" + output);
      // }
      if (cmp != 0) {
        break;
      }
      node = nodes[1 + targetUpto];
      assert node.label == (target.bytes[target.offset + targetUpto] & 0xFF)
          : "node.label="
              + (char) node.label
              + " targetLabel="
              + (char) (target.bytes[target.offset + targetUpto] & 0xFF);

      if (node.hasOutput()) {
        // This node has a block output: the next deeper frame becomes the last valid one.
        lastFrame = stack[1 + lastFrame.ord];
      }
      targetUpto++;
    }

    if (cmp == 0) {
      // Second compare the rest of the term, but
      // don't save node/output/frame:
      cmp =
          Arrays.compareUnsigned(
              term.bytes(),
              targetUpto,
              term.length(),
              target.bytes,
              target.offset + targetUpto,
              target.offset + target.length);
    }

    if (cmp < 0) {
      // Common case: target term is after current
      // term, ie, app is seeking multiple terms
      // in sorted order
      // if (DEBUG) {
      // System.out.println("  target is after current (shares prefixLen=" + targetUpto + ");
      // clear frame.scanned ord=" + lastFrame.ord);
      // }
      currentFrame = lastFrame;

    } else if (cmp > 0) {
      // Uncommon case: target term
      // is before current term; this means we can
      // keep the currentFrame but we must rewind it
      // (so we scan from the start)
      targetBeforeCurrentLength = 0;
      // if (DEBUG) {
      // System.out.println("  target is before current (shares prefixLen=" + targetUpto + ");
      // rewind frame ord=" + lastFrame.ord);
      // }
      currentFrame = lastFrame;
      currentFrame.rewind();
    } else {
      // Target is exactly the same as current term
      assert term.length() == target.length;
      if (termExists) {
        // if (DEBUG) {
        // System.out.println("  target is same as current; return FOUND");
        // }
        return SeekStatus.FOUND;
      } else {
        // if (DEBUG) {
        // System.out.println("  target is same as current but term doesn't exist");
        // }
      }
    }

  } else {
    // No prior seek state: start from the trie root.
    targetBeforeCurrentLength = -1;
    node = nodes[0] = trieReader.root;

    // Empty string prefix must have an output (block) in the index!
    assert node.hasOutput();

    // if (DEBUG) {
    // System.out.println("    no seek state; push root frame");
    // }

    currentFrame = staticFrame;

    // term.length = 0;
    targetUpto = 0;
    currentFrame = pushFrame(node, 0);
  }

  // if (DEBUG) {
  // System.out.println("  start index loop targetUpto=" + targetUpto + " output=" + output +
  // " currentFrame.ord+1=" + currentFrame.ord + " targetBeforeCurrentLength=" +
  // targetBeforeCurrentLength);
  // }

  // We are done sharing the common prefix with the incoming target and where we are currently
  // seek'd; now continue walking the index:
  while (targetUpto < target.length) {

    final int targetLabel = target.bytes[target.offset + targetUpto] & 0xFF;

    final TrieReader.Node nextNode =
        trieReader.lookupChild(targetLabel, node, getNode(1 + targetUpto));

    if (nextNode == null) {

      // Index is exhausted
      // if (DEBUG) {
      // System.out.println("    index: index exhausted label=" + ((char) targetLabel) + " " +
      // targetLabel);
      // }

      validIndexPrefix = currentFrame.prefixLength;
      // validIndexPrefix = targetUpto;

      currentFrame.scanToFloorFrame(target);

      currentFrame.loadBlock();

      // if (DEBUG) System.out.println("  now scanToTerm");
      final SeekStatus result = currentFrame.scanToTerm(target, false);
      if (result == SeekStatus.END) {
        // Block exhausted without reaching target: the ceiling (if any) is the next term.
        term.copyBytes(target);
        termExists = false;

        if (next() != null) {
          // if (DEBUG) {
          // System.out.println("  return NOT_FOUND term=" +
          // ToStringUtils.bytesRefToString(term));
          // }
          return SeekStatus.NOT_FOUND;
        } else {
          // if (DEBUG) {
          // System.out.println("  return END");
          // }
          return SeekStatus.END;
        }
      } else {
        // if (DEBUG) {
        // System.out.println("  return " + result + " term=" +
        // ToStringUtils.bytesRefToString(term));
        // }
        return result;
      }
    } else {
      // Follow this node
      term.setByteAt(targetUpto, (byte) targetLabel);
      node = nextNode;

      // if (DEBUG) {
      // System.out.println("    index: follow label=" + (target.bytes[target.offset +
      // targetUpto]&0xff) + " node.output=" + node.output + " node.nfo=" + node.nextFinalOutput);
      // }
      targetUpto++;

      if (node.hasOutput()) {
        // if (DEBUG) System.out.println("    node is final!");
        currentFrame = pushFrame(node, targetUpto);
        // if (DEBUG) System.out.println("    curFrame.ord=" + currentFrame.ord + " hasTerms=" +
        // currentFrame.hasTerms);
      }
    }
  }

  // validIndexPrefix = targetUpto;
  validIndexPrefix = currentFrame.prefixLength;

  currentFrame.scanToFloorFrame(target);

  currentFrame.loadBlock();

  final SeekStatus result = currentFrame.scanToTerm(target, false);

  if (result == SeekStatus.END) {
    // Block exhausted without reaching target: the ceiling (if any) is the next term.
    term.copyBytes(target);
    termExists = false;
    if (next() != null) {
      // if (DEBUG) {
      // System.out.println("  return NOT_FOUND term=" + term.get().utf8ToString() + " " + term);
      // }
      return SeekStatus.NOT_FOUND;
    } else {
      // if (DEBUG) {
      // System.out.println("  return END");
      // }
      return SeekStatus.END;
    }
  } else {
    return result;
  }
}
// Debug helper: dumps the current seek state (the frame stack) and cross-checks each seek'd
// frame against the trie index, throwing RuntimeException if the state is inconsistent.
@SuppressWarnings("unused")
private void printSeekState(PrintStream out) throws IOException {
  if (currentFrame == staticFrame) {
    out.println("  no prior seek");
  } else {
    out.println("  prior seek state:");
    int ord = 0;
    boolean isSeekFrame = true;
    while (true) {
      SegmentTermsEnumFrame f = getFrame(ord);
      assert f != null;
      final BytesRef prefix = new BytesRef(term.get().bytes, 0, f.prefixLength);
      if (f.nextEnt == -1) {
        out.println(
            "    frame "
                + (isSeekFrame ? "(seek)" : "(next)")
                + " ord="
                + ord
                + " fp="
                + f.fp
                + (f.isFloor ? (" (fpOrig=" + f.fpOrig + ")") : "")
                + " prefixLen="
                + f.prefixLength
                + " prefix="
                + prefix
                + (f.nextEnt == -1 ? "" : (" (of " + f.entCount + ")"))
                + " hasTerms="
                + f.hasTerms
                + " isFloor="
                + f.isFloor
                + " isLastInFloor="
                + f.isLastInFloor
                + " mdUpto="
                + f.metaDataUpto
                + " tbOrd="
                + f.getTermBlockOrd());
      } else {
        out.println(
            "    frame "
                + (isSeekFrame ? "(seek, loaded)" : "(next, loaded)")
                + " ord="
                + ord
                + " fp="
                + f.fp
                + (f.isFloor ? (" (fpOrig=" + f.fpOrig + ")") : "")
                + " prefixLen="
                + f.prefixLength
                + " prefix="
                + prefix
                + " nextEnt="
                + f.nextEnt
                + (f.nextEnt == -1 ? "" : (" (of " + f.entCount + ")"))
                + " hasTerms="
                + f.hasTerms
                + " isFloor="
                + f.isFloor
                + " lastSubFP="
                + f.lastSubFP
                + " isLastInFloor="
                + f.isLastInFloor
                + " mdUpto="
                + f.metaDataUpto
                + " tbOrd="
                + f.getTermBlockOrd());
      }
      assert !isSeekFrame || f.node != null : "isSeekFrame=" + isSeekFrame + " f.node=" + f.node;
      // A seek'd frame's node label must match the corresponding term byte.
      if (f.prefixLength > 0
          && isSeekFrame
          && f.node.label != (term.byteAt(f.prefixLength - 1) & 0xFF)) {
        out.println(
            "      broken seek state: node.label="
                + (char) f.node.label
                + " vs term byte="
                + (char) (term.byteAt(f.prefixLength - 1) & 0xFF));
        throw new RuntimeException("seek state is broken");
      }
      // Re-walk the trie along the frame's prefix and verify it agrees with the frame.
      TrieReader.Node node = trieReader.root;
      TrieReader.Node child = new TrieReader.Node();
      for (int i = 0; i < prefix.length; i++) {
        TrieReader.Node found =
            trieReader.lookupChild(prefix.bytes[i + prefix.offset] & 0xFF, node, child);
        if (found == null) {
          throw new RuntimeException("seek state is broken, prefix not exist in index");
        }
        node = child;
        child = new TrieReader.Node();
      }
      if (!node.hasOutput()) {
        out.println("      broken seek state: prefix is not final in index");
        throw new RuntimeException("seek state is broken");
      } else if (isSeekFrame && !f.isFloor) {
        if (f.fp != node.outputFp || f.hasTerms != node.hasTerms || f.isFloor != node.isFloor()) {
          out.println(
              "      broken seek state: output fp="
                  + node.outputFp
                  + ", hasTerms="
                  + node.hasTerms
                  + ", isFloor="
                  + node.isFloor()
                  + " doesn't match frame fp="
                  + f.fp
                  + ", hasTerms="
                  + f.hasTerms
                  + ", isFloor="
                  + f.isFloor);
          throw new RuntimeException("seek state is broken");
        }
      }
      if (f == currentFrame) {
        break;
      }
      if (f.prefixLength == validIndexPrefix) {
        // Frames beyond the valid index prefix were produced by next(), not by a seek.
        isSeekFrame = false;
      }
      ord++;
    }
  }
}
/* Decodes only the term bytes of the next term.  If caller then asks for
metadata, ie docFreq, totalTermFreq or pulls a D/&PEnum, we then (lazily)
decode all metadata up to the current term. */
@Override
public BytesRef next() throws IOException {
  if (in == null) {
    // Fresh TermsEnum; seek to first term:
    final TrieReader.Node node = nodes[0] = trieReader.root;
    currentFrame = pushFrame(node, 0);
    currentFrame.loadBlock();
  }

  targetBeforeCurrentLength = currentFrame.ord;

  assert !eof;
  // if (DEBUG) {
  // System.out.println("\nBTTR.next seg=" + fr.parent.segment + " term=" +
  // ToStringUtils.bytesRefToString(term) + " termExists?=" + termExists + " field=" +
  // fr.fieldInfo.name + " termBlockOrd=" + currentFrame.state.termBlockOrd +
  // " validIndexPrefix=" + validIndexPrefix);
  // printSeekState(System.out);
  // }

  if (currentFrame == staticFrame) {
    // If seek was previously called and the term was
    // cached, or seek(TermState) was called, usually
    // caller is just going to pull a D/&PEnum or get
    // docFreq, etc.  But, if they then call next(),
    // this method catches up all internal state so next()
    // works properly:
    // if (DEBUG) System.out.println("  re-seek to pending term=" + term.utf8ToString() + " " +
    // term);
    final boolean result = seekExact(term.get());
    assert result;
  }

  // Pop finished blocks
  while (currentFrame.nextEnt == currentFrame.entCount) {
    if (!currentFrame.isLastInFloor) {
      // Advance to next floor block
      currentFrame.loadNextFloorBlock();
      break;
    } else {
      // if (DEBUG) System.out.println("  pop frame");
      if (currentFrame.ord == 0) {
        // Popped past the root frame: iteration is exhausted.
        // if (DEBUG) System.out.println("  return null");
        assert setEOF();
        term.clear();
        validIndexPrefix = 0;
        currentFrame.rewind();
        termExists = false;
        return null;
      }
      final long lastFP = currentFrame.fpOrig;
      currentFrame = stack[currentFrame.ord - 1];

      if (currentFrame.nextEnt == -1 || currentFrame.lastSubFP != lastFP) {
        // We popped into a frame that's not loaded
        // yet or not scan'd to the right entry
        currentFrame.scanToFloorFrame(term.get());
        currentFrame.loadBlock();
        currentFrame.scanToSubBlock(lastFP);
      }

      // Note that the seek state (last seek) has been
      // invalidated beyond this depth
      validIndexPrefix = Math.min(validIndexPrefix, currentFrame.prefixLength);
      // if (DEBUG) {
      // System.out.println("  reset validIndexPrefix=" + validIndexPrefix);
      // }
    }
  }

  while (true) {
    if (currentFrame.next()) {
      // Push to new block:
      // if (DEBUG) System.out.println("  push frame");
      currentFrame = pushFrame(null, currentFrame.lastSubFP, term.length());
      // This is a "next" frame -- even if it's
      // floor'd we must pretend it isn't so we don't
      // try to scan to the right floor frame:
      currentFrame.loadBlock();
    } else {
      // if (DEBUG) System.out.println("  return term=" + ToStringUtils.bytesRefToString(term) +
      // " currentFrame.ord=" + currentFrame.ord);
      return term.get();
    }
  }
}
/** Returns the current term; only valid while not at EOF. */
@Override
public BytesRef term() {
  assert !eof;
  return term.get();
}
/** Returns the document frequency of the current term, decoding metadata lazily. */
@Override
public int docFreq() throws IOException {
  assert !eof;
  // Metadata is decoded lazily; bring it up to date before reading docFreq.
  currentFrame.decodeMetaData();
  final BlockTermState state = currentFrame.state;
  return state.docFreq;
}
/** Returns the total term frequency of the current term, decoding metadata lazily. */
@Override
public long totalTermFreq() throws IOException {
  assert !eof;
  // Metadata is decoded lazily; bring it up to date before reading totalTermFreq.
  currentFrame.decodeMetaData();
  final BlockTermState state = currentFrame.state;
  return state.totalTermFreq;
}
/** Returns a postings enum for the current term, decoding its metadata lazily first. */
@Override
public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException {
  assert !eof;
  // Ensure metadata for the current term is decoded before handing state to the reader.
  currentFrame.decodeMetaData();
  final BlockTermState state = currentFrame.state;
  return fr.parent.postingsReader.postings(fr.fieldInfo, state, reuse, flags);
}
/** Returns an impacts enum for the current term, decoding its metadata lazily first. */
@Override
public ImpactsEnum impacts(int flags) throws IOException {
  assert !eof;
  // Ensure metadata for the current term is decoded before handing state to the reader.
  currentFrame.decodeMetaData();
  final BlockTermState state = currentFrame.state;
  return fr.parent.postingsReader.impacts(fr.fieldInfo, state, flags);
}
/**
 * Seeks using a previously obtained {@link TermState}; if we are already positioned on the
 * target with an existing term, this is a no-op.
 */
@Override
public void seekExact(BytesRef target, TermState otherState) {
  assert clearEOF();
  if (termExists && target.compareTo(term.get()) == 0) {
    // Already on the target term with valid state; nothing to do.
    return;
  }
  assert otherState != null && otherState instanceof BlockTermState;
  // Park on the static frame and adopt the caller-provided state; any prior
  // seek state becomes invalid.
  currentFrame = staticFrame;
  currentFrame.state.copyFrom(otherState);
  term.copyBytes(target);
  currentFrame.metaDataUpto = currentFrame.getTermBlockOrd();
  assert currentFrame.metaDataUpto > 0;
  validIndexPrefix = 0;
}
/** Returns a cloned snapshot of the current term's state (metadata decoded lazily). */
@Override
public TermState termState() throws IOException {
  assert !eof;
  currentFrame.decodeMetaData();
  // Clone so the caller's copy is unaffected by further iteration of this enum.
  return currentFrame.state.clone();
}
/** Ord-based seeking is not supported by this terms dictionary. */
@Override
public void seekExact(long ord) {
  throw new UnsupportedOperationException();
}
/** Term ordinals are not supported by this terms dictionary. */
@Override
public long ord() {
  throw new UnsupportedOperationException();
}
}
|
oracle/graal | 36,007 | compiler/src/jdk.graal.compiler.test/src/jdk/graal/compiler/truffle/test/BytecodeDSLCompilationTest.java | /*
* Copyright (c) 2024, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.truffle.test;
import static com.oracle.truffle.api.bytecode.test.basic_interpreter.AbstractBasicInterpreterTest.createNodes;
import static com.oracle.truffle.api.bytecode.test.basic_interpreter.AbstractBasicInterpreterTest.parseNode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import java.util.List;
import org.graalvm.polyglot.Context;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import com.oracle.truffle.api.bytecode.BytecodeConfig;
import com.oracle.truffle.api.bytecode.BytecodeLocal;
import com.oracle.truffle.api.bytecode.BytecodeLocation;
import com.oracle.truffle.api.bytecode.BytecodeNode;
import com.oracle.truffle.api.bytecode.BytecodeParser;
import com.oracle.truffle.api.bytecode.BytecodeRootNodes;
import com.oracle.truffle.api.bytecode.ContinuationResult;
import com.oracle.truffle.api.bytecode.test.BytecodeDSLTestLanguage;
import com.oracle.truffle.api.bytecode.test.basic_interpreter.AbstractBasicInterpreterTest;
import com.oracle.truffle.api.bytecode.test.basic_interpreter.BasicInterpreter;
import com.oracle.truffle.api.bytecode.test.basic_interpreter.BasicInterpreterBuilder;
import com.oracle.truffle.api.frame.FrameSlotKind;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.instrumentation.ExecutionEventNode;
import com.oracle.truffle.api.instrumentation.Instrumenter;
import com.oracle.truffle.api.instrumentation.SourceSectionFilter;
import com.oracle.truffle.api.instrumentation.StandardTags.RootTag;
import com.oracle.truffle.api.instrumentation.StandardTags.StatementTag;
import com.oracle.truffle.api.instrumentation.TruffleInstrument;
import com.oracle.truffle.api.source.Source;
import com.oracle.truffle.api.source.SourceSection;
import com.oracle.truffle.runtime.OptimizedCallTarget;
@RunWith(Parameterized.class)
public class BytecodeDSLCompilationTest extends TestWithSynchronousCompiling {
// Parameterizes the suite: every test method runs once per generated interpreter variant.
@Parameters(name = "{0}")
public static List<Class<? extends BasicInterpreter>> getInterpreterClasses() {
    return AbstractBasicInterpreterTest.allInterpreters();
}
// Injected by the Parameterized runner: the interpreter variant under test.
@Parameter(0) public Class<? extends BasicInterpreter> interpreterClass;
/** Whether the interpreter variant under test was generated with boxing elimination. */
private boolean hasBoxingElimination() {
    AbstractBasicInterpreterTest.TestRun run = new AbstractBasicInterpreterTest.TestRun(interpreterClass, false);
    return run.hasBoxingElimination();
}
Context context; // polyglot context, created fresh in before()
Instrumenter instrumenter; // instrumenter service looked up from the test instrument in before()
/** Creates a fresh context, initializes the test language, and looks up the instrumenter. */
@Before
@Override
public void before() {
    context = setupContext();
    context.initialize(BytecodeDSLTestLanguage.ID);
    var instruments = context.getEngine().getInstruments();
    var testInstrument = instruments.get(BytecodeDSLCompilationTestInstrumentation.ID);
    instrumenter = testInstrument.lookup(Instrumenter.class);
}
@BeforeClass
public static void beforeClass() {
    /**
     * Note: we force load the EarlyReturnException class because compilation bails out when it
     * hasn't been loaded (the {@code interceptControlFlowException} method references it
     * directly).
     */
    try {
        Class.forName(BasicInterpreter.EarlyReturnException.class.getName());
    } catch (ClassNotFoundException ex) {
        // Should be impossible: the class literal above already references the type.
        fail("should not have failed to load EarlyReturnException class");
    }
}
/**
 * The program below implements:
 *
 * <pre>
 * var j = 0;
 * var i = 0;
 * var sum = 0;
 * while (i < 500000) {
 *     j = j + 1;
 *     sum = sum + j;
 *     i = i + 1;
 * }
 * return sum;
 * </pre>
 *
 * The result should be 1 + 2 + ... + 500000 = 125000250000.
 */
@Test
public void testOSR1() {
    BasicInterpreter root = parseNode(interpreterClass, BytecodeDSLTestLanguage.REF.get(null), false, "osrRoot", b -> {
        b.beginRoot();
        BytecodeLocal iLoc = b.createLocal();
        BytecodeLocal sumLoc = b.createLocal();
        BytecodeLocal jLoc = b.createLocal();
        // int j = 0;
        b.beginStoreLocal(jLoc);
        b.emitLoadConstant(0L);
        b.endStoreLocal();
        // int i = 0;
        b.beginStoreLocal(iLoc);
        b.emitLoadConstant(0L);
        b.endStoreLocal();
        // int sum = 0;
        b.beginStoreLocal(sumLoc);
        b.emitLoadConstant(0L);
        b.endStoreLocal();
        // while (i < TOTAL_ITERATIONS) {
        b.beginWhile();
        b.beginLess();
        b.emitLoadLocal(iLoc);
        b.emitLoadConstant(500000L);
        b.endLess();
        b.beginBlock();
        // j = j + 1;
        b.beginStoreLocal(jLoc);
        b.beginAdd();
        b.emitLoadLocal(jLoc);
        b.emitLoadConstant(1L);
        b.endAdd();
        b.endStoreLocal();
        // sum = sum + j;
        b.beginStoreLocal(sumLoc);
        b.beginAdd();
        b.emitLoadLocal(sumLoc);
        b.emitLoadLocal(jLoc);
        b.endAdd();
        b.endStoreLocal();
        // i = i + 1;
        b.beginStoreLocal(iLoc);
        b.beginAdd();
        b.emitLoadLocal(iLoc);
        b.emitLoadConstant(1L);
        b.endAdd();
        b.endStoreLocal();
        // }
        b.endBlock();
        b.endWhile();
        // return sum;
        b.beginReturn();
        b.emitLoadLocal(sumLoc);
        b.endReturn();
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    for (int i = 0; i < 10; i++) {
        // Reset the profile each iteration so the hot loop is only ever compiled
        // via on-stack replacement, not via regular root compilation.
        target.resetCompilationProfile();
        assertEquals(125000250000L, target.call());
    }
}
/**
 * The program below implements:
 *
 * <pre>
 * int j = 0;
 * int i = 0;
 * int sum = 0;
 * while (i < 500000) {
 *     while (j < i) {
 *         int temp;
 *         if (i % 3 < 1) {
 *             temp = 1;
 *         } else {
 *             temp = i % 3;
 *         }
 *         j = j + 1;   // note: temp is computed but never read
 *     }
 *     sum = sum + j;
 *     i = i + 1;
 * }
 * return sum;
 * </pre>
 *
 * Since j is initialized once (before the outer loop) and the inner loop advances it
 * by 1 until it catches up with i, each outer iteration leaves j == i. The result is
 * therefore 0 + 1 + ... + 499999 = 124999750000, matching the assertion below.
 * (The comment this replaces described a "j = j + temp" program with expected result
 * 12497500, which does not match the bytecode actually built here.)
 */
@Test
public void testOSR2() {
    BasicInterpreter root = parseNode(interpreterClass, BytecodeDSLTestLanguage.REF.get(null), false, "osrRoot", b -> {
        b.beginRoot();
        BytecodeLocal iLoc = b.createLocal();
        BytecodeLocal sumLoc = b.createLocal();
        BytecodeLocal jLoc = b.createLocal();
        BytecodeLocal tempLoc = b.createLocal();
        // int j = 0;
        b.beginStoreLocal(jLoc);
        b.emitLoadConstant(0L);
        b.endStoreLocal();
        // int i = 0;
        b.beginStoreLocal(iLoc);
        b.emitLoadConstant(0L);
        b.endStoreLocal();
        // int sum = 0;
        b.beginStoreLocal(sumLoc);
        b.emitLoadConstant(0L);
        b.endStoreLocal();
        // while (i < TOTAL_ITERATIONS) {
        b.beginWhile();
        b.beginLess();
        b.emitLoadLocal(iLoc);
        b.emitLoadConstant(500000L);
        b.endLess();
        b.beginBlock();
        // while (j < i) {
        b.beginWhile();
        b.beginLess();
        b.emitLoadLocal(jLoc);
        b.emitLoadLocal(iLoc);
        b.endLess();
        b.beginBlock();
        // int temp;
        // if (i % 3 < 1) {
        b.beginIfThenElse();
        b.beginLess();
        b.beginMod();
        b.emitLoadLocal(iLoc);
        b.emitLoadConstant(3L);
        b.endMod();
        b.emitLoadConstant(1L);
        b.endLess();
        // temp = 1;
        b.beginStoreLocal(tempLoc);
        b.emitLoadConstant(1L);
        b.endStoreLocal();
        // } else {
        // temp = i % 3;
        b.beginStoreLocal(tempLoc);
        b.beginMod();
        b.emitLoadLocal(iLoc);
        b.emitLoadConstant(3L);
        b.endMod();
        b.endStoreLocal();
        // }
        b.endIfThenElse();
        // j = j + 1;
        // (temp is not read here; presumably the if/else exists only to put extra
        // branches inside the OSR-compiled loop -- confirm if this was intended to
        // be "j = j + temp")
        b.beginStoreLocal(jLoc);
        b.beginAdd();
        b.emitLoadLocal(jLoc);
        b.emitLoadConstant(1L);
        b.endAdd();
        b.endStoreLocal();
        // }
        b.endBlock();
        b.endWhile();
        // sum = sum + j;
        b.beginStoreLocal(sumLoc);
        b.beginAdd();
        b.emitLoadLocal(sumLoc);
        b.emitLoadLocal(jLoc);
        b.endAdd();
        b.endStoreLocal();
        // i = i + 1;
        b.beginStoreLocal(iLoc);
        b.beginAdd();
        b.emitLoadLocal(iLoc);
        b.emitLoadConstant(1L);
        b.endAdd();
        b.endStoreLocal();
        // }
        b.endBlock();
        b.endWhile();
        // return sum;
        b.beginReturn();
        b.emitLoadLocal(sumLoc);
        b.endReturn();
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    for (int i = 0; i < 10; i++) {
        // reset profile to avoid regular compilation (OSR should kick in instead)
        target.resetCompilationProfile();
        assertEquals(124999750000L, target.call());
    }
}
/** Sanity check: a trivial root compiles and stays compiled across calls. */
@Test
public void testCompiles() {
    // return 20 + 22
    BasicInterpreter root = parseNodeForCompilation(interpreterClass, "addTwoConstants", b -> {
        b.beginRoot();
        b.beginReturn();
        b.beginAdd();
        b.emitLoadConstant(20L);
        b.emitLoadConstant(22L);
        b.endAdd();
        b.endReturn();
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    assertEquals(42L, target.call());
    target.compile(true);
    assertCompiled(target);
    // Calling the compiled code must not deoptimize it.
    assertEquals(42L, target.call());
    assertCompiled(target);
}
/**
 * Tests compilation of a root with an early return inside a conditional branch:
 * {@code return 30 + (arg0 ? 12 : (return 123; 0))}.
 */
@Test
public void testMultipleReturns() {
    // return 30 + (arg0 ? 12 : (return 123; 0))
    BasicInterpreter root = parseNodeForCompilation(interpreterClass, "multipleReturns", b -> {
        b.beginRoot();
        b.beginReturn();
        b.beginAdd();
        b.emitLoadConstant(30L);
        b.beginConditional();
        b.emitLoadArgument(0);
        b.emitLoadConstant(12L);
        b.beginBlock();
        // Early return taken when arg0 is false; the trailing 0 only serves as the
        // (unreachable) value of the block.
        b.beginReturn();
        b.emitLoadConstant(123L);
        b.endReturn();
        b.emitLoadConstant(0L);
        b.endBlock();
        b.endConditional();
        b.endAdd();
        b.endReturn();
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    assertEquals(42L, target.call(true));
    assertEquals(123L, target.call(false));
    target.compile(true);
    assertCompiled(target);
    // Both paths must also work in compiled code without deopting.
    assertEquals(42L, target.call(true));
    assertEquals(123L, target.call(false));
    assertCompiled(target);
}
/**
 * When a root changes its local tags (here: by storing an argument whose dynamic type
 * differs from the profiled one), compiled code should be invalidated.
 */
@Test
public void testStoreInvalidatesCode() {
    // Only meaningful for interpreter variants that profile local types.
    assumeTrue(hasBoxingElimination());
    BytecodeRootNodes<BasicInterpreter> rootNodes = createNodes(interpreterClass, BytecodeDSLTestLanguage.REF.get(null), false, BytecodeConfig.DEFAULT, b -> {
        b.beginRoot();
        BytecodeLocal x = b.createLocal("x", null);
        // x = arg0
        b.beginStoreLocal(x);
        b.emitLoadArgument(0);
        b.endStoreLocal();
        // yield null (splits execution into a root part and a continuation part)
        b.beginYield();
        b.emitLoadNull();
        b.endYield();
        // return x
        b.beginReturn();
        b.emitLoadLocal(x);
        b.endReturn();
        b.endRoot();
    });
    BasicInterpreter root = rootNodes.getNode(0);
    root.getBytecodeNode().setUncachedThreshold(0); // force cached
    // Run once and check profile.
    OptimizedCallTarget rootTarget = (OptimizedCallTarget) root.getCallTarget();
    ContinuationResult cont = (ContinuationResult) rootTarget.call(42L);
    OptimizedCallTarget contTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(42L, cont.continueWith(null));
    assertEquals(FrameSlotKind.Long, root.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Now, force compile root node and continuation.
    rootTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(rootTarget);
    assertCompiled(contTarget);
    // Run again to ensure nothing deopts.
    cont = (ContinuationResult) rootTarget.call(123L);
    assertCompiled(rootTarget);
    assertEquals(123L, cont.continueWith(null));
    assertCompiled(contTarget);
    assertEquals(FrameSlotKind.Long, root.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // If we store a value with a different tag, both call targets should invalidate.
    cont = (ContinuationResult) rootTarget.call("hello");
    assertNotCompiled(rootTarget);
    assertNotCompiled(contTarget);
    assertEquals("hello", cont.continueWith(null));
    assertEquals(FrameSlotKind.Object, root.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Both call targets should recompile.
    rootTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(rootTarget);
    assertCompiled(contTarget);
}
/**
 * When a BytecodeNode store (via {@code setLocalValue}) changes the local tags,
 * compiled code should be invalidated; a same-tag store must not invalidate.
 */
@Test
public void testBytecodeNodeStoreInvalidatesCode() {
    assumeTrue(hasBoxingElimination());
    BytecodeRootNodes<BasicInterpreter> rootNodes = createNodes(interpreterClass, BytecodeDSLTestLanguage.REF.get(null), false, BytecodeConfig.DEFAULT, b -> {
        b.beginRoot();
        BytecodeLocal x = b.createLocal("x", null);
        // x = 42
        b.beginStoreLocal(x);
        b.emitLoadConstant(42L);
        b.endStoreLocal();
        // yield null
        b.beginYield();
        b.emitLoadNull();
        b.endYield();
        // return x
        b.beginReturn();
        b.emitLoadLocal(x);
        b.endReturn();
        b.endRoot();
    });
    BasicInterpreter root = rootNodes.getNode(0);
    root.getBytecodeNode().setUncachedThreshold(0); // force cached
    // Run once and check profile.
    OptimizedCallTarget rootTarget = (OptimizedCallTarget) root.getCallTarget();
    ContinuationResult cont = (ContinuationResult) rootTarget.call();
    OptimizedCallTarget contTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(42L, cont.continueWith(null));
    assertEquals(FrameSlotKind.Long, root.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Now, force compile root node and continuation.
    rootTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(rootTarget);
    assertCompiled(contTarget);
    // Run again to ensure nothing deopts.
    cont = (ContinuationResult) rootTarget.call();
    assertCompiled(rootTarget);
    assertEquals(42L, cont.continueWith(null));
    assertCompiled(contTarget);
    assertEquals(FrameSlotKind.Long, root.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // If we store a value with the same tag, both call targets should stay valid.
    cont = (ContinuationResult) rootTarget.call();
    BytecodeLocation location = cont.getBytecodeLocation();
    BytecodeNode bytecodeNode = location.getBytecodeNode();
    bytecodeNode.setLocalValue(location.getBytecodeIndex(), cont.getFrame(), 0, 123L);
    assertCompiled(rootTarget);
    assertCompiled(contTarget);
    assertEquals(123L, cont.continueWith(null));
    assertEquals(FrameSlotKind.Long, root.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // If we store a value with a different tag, both call targets should invalidate.
    cont = (ContinuationResult) rootTarget.call();
    location = cont.getBytecodeLocation();
    bytecodeNode = location.getBytecodeNode();
    bytecodeNode.setLocalValue(location.getBytecodeIndex(), cont.getFrame(), 0, "hello");
    assertNotCompiled(rootTarget);
    assertNotCompiled(contTarget);
    assertEquals("hello", cont.continueWith(null));
    assertEquals(FrameSlotKind.Object, root.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Both call targets should recompile.
    rootTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(rootTarget);
    assertCompiled(contTarget);
}
/**
 * When an inner root changes the local tags with a materialized store, compiled code
 * should be invalidated.
 */
@Test
public void testMaterializedStoreInvalidatesCode() {
    assumeTrue(hasBoxingElimination());
    BytecodeRootNodes<BasicInterpreter> rootNodes = createNodes(interpreterClass, BytecodeDSLTestLanguage.REF.get(null), false, BytecodeConfig.DEFAULT, b -> {
        b.beginRoot();
        BytecodeLocal x = b.createLocal("x", null);
        b.beginStoreLocal(x);
        b.emitLoadConstant(42L);
        b.endStoreLocal();
        b.beginYield();
        b.emitLoadNull();
        b.endYield();
        b.beginRoot(); // inner
        // inner(frame, value): stores arg1 into the outer local x through the
        // materialized frame passed as arg0.
        b.beginStoreLocalMaterialized(x);
        b.emitLoadArgument(0);
        b.emitLoadArgument(1);
        b.endStoreLocalMaterialized();
        b.endRoot();
        b.beginReturn();
        b.emitLoadLocal(x);
        b.endReturn();
        b.endRoot();
    });
    BasicInterpreter outer = rootNodes.getNode(0);
    outer.getBytecodeNode().setUncachedThreshold(0); // force cached
    BasicInterpreter inner = rootNodes.getNode(1);
    // Run once and check profile.
    OptimizedCallTarget outerTarget = (OptimizedCallTarget) outer.getCallTarget();
    ContinuationResult cont = (ContinuationResult) outerTarget.call();
    OptimizedCallTarget contTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(42L, cont.continueWith(null));
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Now, force compile root node and continuation.
    outerTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(outerTarget);
    assertCompiled(contTarget);
    // Run again to ensure nothing deopts.
    cont = (ContinuationResult) outerTarget.call();
    assertCompiled(outerTarget);
    assertEquals(42L, cont.continueWith(null));
    assertCompiled(contTarget);
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // If we store a value with the same tag, both call targets should stay valid.
    cont = (ContinuationResult) outerTarget.call();
    inner.getCallTarget().call(cont.getFrame(), 123L);
    assertCompiled(outerTarget);
    assertEquals(123L, cont.continueWith(null));
    assertCompiled(contTarget);
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // If we store a value with a different tag, both call targets should invalidate.
    cont = (ContinuationResult) outerTarget.call();
    inner.getCallTarget().call(cont.getFrame(), "hello");
    assertNotCompiled(outerTarget);
    assertNotCompiled(contTarget);
    assertEquals("hello", cont.continueWith(null));
    assertEquals(FrameSlotKind.Object, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Both call targets should recompile.
    outerTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(outerTarget);
    assertCompiled(contTarget);
}
/**
 * When an inner root changes the local tags with a materialized local accessor store,
 * compiled code should be invalidated.
 */
@Test
public void testMaterializedAccessorStoreInvalidatesCode() {
    assumeTrue(hasBoxingElimination());
    BytecodeRootNodes<BasicInterpreter> rootNodes = createNodes(interpreterClass, BytecodeDSLTestLanguage.REF.get(null), false, BytecodeConfig.DEFAULT, b -> {
        b.beginRoot();
        BytecodeLocal x = b.createLocal("x", null);
        b.beginStoreLocal(x);
        b.emitLoadConstant(42L);
        b.endStoreLocal();
        b.beginYield();
        b.emitLoadNull();
        b.endYield();
        b.beginRoot(); // inner
        // Same shape as testMaterializedStoreInvalidatesCode, but using the
        // TeeMaterializedLocal accessor operation instead of a plain store.
        b.beginTeeMaterializedLocal(x);
        b.emitLoadArgument(0);
        b.emitLoadArgument(1);
        b.endTeeMaterializedLocal();
        b.endRoot();
        b.beginReturn();
        b.emitLoadLocal(x);
        b.endReturn();
        b.endRoot();
    });
    BasicInterpreter outer = rootNodes.getNode(0);
    outer.getBytecodeNode().setUncachedThreshold(0); // force cached
    BasicInterpreter inner = rootNodes.getNode(1);
    // Run once and check profile.
    OptimizedCallTarget outerTarget = (OptimizedCallTarget) outer.getCallTarget();
    ContinuationResult cont = (ContinuationResult) outerTarget.call();
    OptimizedCallTarget contTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(42L, cont.continueWith(null));
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Now, force compile root node and continuation.
    outerTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(outerTarget);
    assertCompiled(contTarget);
    // Run again to ensure nothing deopts.
    cont = (ContinuationResult) outerTarget.call();
    assertCompiled(outerTarget);
    assertEquals(42L, cont.continueWith(null));
    assertCompiled(contTarget);
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // If we store a value with the same tag, both call targets should stay valid.
    cont = (ContinuationResult) outerTarget.call();
    inner.getCallTarget().call(cont.getFrame(), 123L);
    assertCompiled(outerTarget);
    assertEquals(123L, cont.continueWith(null));
    assertCompiled(contTarget);
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // If we store a value with a different tag, both call targets should invalidate.
    cont = (ContinuationResult) outerTarget.call();
    inner.getCallTarget().call(cont.getFrame(), "hello");
    assertNotCompiled(outerTarget);
    assertNotCompiled(contTarget);
    assertEquals("hello", cont.continueWith(null));
    assertEquals(FrameSlotKind.Object, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
    // Both call targets should recompile.
    outerTarget.compile(true);
    contTarget.compile(true);
    assertCompiled(outerTarget);
    assertCompiled(contTarget);
}
/**
 * Enabling an instrumentation invalidates compiled code; the instrumented bytecode
 * (IncrementValue adds 1 to its operand: 42 -> 43) recompiles and stays compiled.
 */
@Test
public void testInstrumentation() {
    // return increment(20 + 22) -- IncrementValue is inert until enabled below
    BasicInterpreter root = parseNodeForCompilation(interpreterClass, "addTwoConstantsInstrumented", b -> {
        b.beginRoot();
        b.beginReturn();
        b.beginIncrementValue();
        b.beginAdd();
        b.emitLoadConstant(20L);
        b.emitLoadConstant(22L);
        b.endAdd();
        b.endIncrementValue();
        b.endReturn();
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    assertEquals(42L, target.call());
    target.compile(true);
    assertCompiled(target);
    // Instrumentation should invalidate the compiled code.
    root.getRootNodes().update(
                    BasicInterpreterBuilder.invokeNewConfigBuilder(interpreterClass).addInstrumentation(BasicInterpreter.IncrementValue.class).build());
    assertNotCompiled(target);
    // The instrumented interpreter should be recompiled.
    assertEquals(43L, target.call());
    target.compile(true);
    assertCompiled(target);
    assertEquals(43L, target.call());
    assertCompiled(target);
}
/**
 * Yield splits execution into a root call target and a continuation call target;
 * the two compile independently of each other.
 */
@Test
public void testYield() {
    // return 20 + (yield 123)
    BasicInterpreter root = parseNodeForCompilation(interpreterClass, "addYield", b -> {
        b.beginRoot();
        b.beginReturn();
        b.beginAdd();
        b.emitLoadConstant(20L);
        b.beginYield();
        b.emitLoadConstant(123L);
        b.endYield();
        b.endAdd();
        b.endReturn();
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    ContinuationResult cont = (ContinuationResult) target.call();
    assertEquals(123L, cont.getResult());
    OptimizedCallTarget continuationCallTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(42L, cont.continueWith(22L));
    assertNotCompiled(target);
    assertNotCompiled(continuationCallTarget);
    // Compiling the root does not compile the continuation.
    target.compile(true);
    cont = (ContinuationResult) target.call();
    continuationCallTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(40L, cont.continueWith(20L));
    assertCompiled(target);
    assertNotCompiled(continuationCallTarget);
    // After compiling the continuation too, both stay compiled across a full run.
    continuationCallTarget.compile(true);
    cont = (ContinuationResult) target.call();
    continuationCallTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(44L, cont.continueWith(24L));
    assertCompiled(target);
    assertCompiled(continuationCallTarget);
}
/**
 * Enabling an instrumentation invalidates both the root and the continuation call
 * targets; both recompile with the instrumented bytecode (result 42 -> 43).
 */
@Test
public void testYieldInstrumentation() {
    // return increment(20 + (yield 123))
    BasicInterpreter root = parseNodeForCompilation(interpreterClass, "addYieldInstrumented", b -> {
        b.beginRoot();
        b.beginReturn();
        b.beginIncrementValue();
        b.beginAdd();
        b.emitLoadConstant(20L);
        b.beginYield();
        b.emitLoadConstant(123L);
        b.endYield();
        b.endAdd();
        b.endIncrementValue();
        b.endReturn();
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    OptimizedCallTarget continuationCallTarget = null;
    ContinuationResult cont = (ContinuationResult) target.call();
    assertEquals(123L, cont.getResult());
    continuationCallTarget = (OptimizedCallTarget) cont.getContinuationCallTarget();
    assertEquals(42L, cont.continueWith(22L));
    assertNotCompiled(target);
    assertNotCompiled(continuationCallTarget);
    target.compile(true);
    continuationCallTarget.compile(true);
    assertCompiled(target);
    assertCompiled(continuationCallTarget);
    // Instrumentation should invalidate the compiled code.
    root.getRootNodes().update(
                    BasicInterpreterBuilder.invokeNewConfigBuilder(interpreterClass).addInstrumentation(BasicInterpreter.IncrementValue.class).build());
    assertNotCompiled(target);
    assertNotCompiled(continuationCallTarget);
    // The instrumented interpreter should be recompiled.
    assertEquals(43L, ((ContinuationResult) target.call()).continueWith(22L));
    target.compile(true);
    continuationCallTarget.compile(true);
    assertCompiled(target);
    assertCompiled(continuationCallTarget);
    assertEquals(43L, ((ContinuationResult) target.call()).continueWith(22L));
    assertCompiled(target);
    assertCompiled(continuationCallTarget);
}
/**
 * Lazily materializing source information must not invalidate compiled code; only an
 * ensureSourceInformation call executed from compiled code deopts and updates it.
 */
@Test
public void testCompiledSourceInfo() {
    Source s = Source.newBuilder("test", "return sourcePosition", "compiledSourceInfo").build();
    BasicInterpreter root = parseNodeForCompilation(interpreterClass, "compiledSourceInfo", b -> {
        b.beginSource(s);
        b.beginSourceSection(0, 21);
        b.beginRoot();
        b.beginReturn();
        // Section (7, 14) covers "sourcePosition" in "return sourcePosition".
        b.beginSourceSection(7, 14);
        b.beginEnsureAndGetSourcePosition();
        b.emitLoadArgument(0);
        b.endEnsureAndGetSourcePosition();
        b.endSourceSection();
        b.endReturn();
        b.endRoot();
        b.endSourceSection();
        b.endSource();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    // No source position is available yet (null result with arg0 == false).
    assertNull(target.call(false));
    target.compile(true);
    assertCompiled(target);
    // Reparse with sources. The compiled code should not invalidate.
    root.getBytecodeNode().ensureSourceInformation();
    assertCompiled(target);
    // Calling the compiled code won't update the sources.
    assertNull(target.call(false));
    assertCompiled(target);
    // Calling ensureSourceInformation from compiled code should deopt and update the sources.
    assertEquals("sourcePosition", ((SourceSection) target.call(true)).getCharacters().toString());
    assertNotCompiled(target);
    // If we recompile, source information should be available.
    target.compile(true);
    assertCompiled(target);
    assertEquals("sourcePosition", ((SourceSection) target.call(false)).getCharacters().toString());
    assertCompiled(target);
    // Calling ensureSourceInformation when sources are available should not deopt.
    assertEquals("sourcePosition", ((SourceSection) target.call(true)).getCharacters().toString());
    assertCompiled(target);
}
/**
 * Attaching tag bindings invalidates compiled code; the re-parsed bytecode with tag
 * instrumentation must recompile and then run without deopting.
 */
@Test
public void testTagInstrumentation() {
    BasicInterpreter root = parseNodeForCompilation(interpreterClass, "tagInstrumentation", b -> {
        b.beginRoot();
        // i = 0
        BytecodeLocal i = b.createLocal();
        b.beginTag(StatementTag.class);
        b.beginStoreLocal(i);
        b.emitLoadConstant(0L);
        b.endStoreLocal();
        b.endTag(StatementTag.class);
        // while i < arg0
        b.beginWhile();
        b.beginTag(StatementTag.class);
        b.beginLess();
        b.emitLoadLocal(i);
        b.emitLoadArgument(0);
        b.endLess();
        b.endTag(StatementTag.class);
        // i = i + 1;
        b.beginTag(StatementTag.class);
        b.beginStoreLocal(i);
        b.beginAdd();
        b.emitLoadLocal(i);
        b.emitLoadConstant(1L);
        b.endAdd();
        b.endStoreLocal();
        b.endTag(StatementTag.class);
        b.endWhile();
        // return i
        b.beginTag(StatementTag.class);
        b.beginReturn();
        b.emitLoadLocal(i);
        b.endReturn();
        b.endTag(StatementTag.class);
        b.endRoot();
    });
    OptimizedCallTarget target = (OptimizedCallTarget) root.getCallTarget();
    assertEquals(5L, target.call(5L));
    // Ensure it compiles without tags.
    target.compile(true);
    assertCompiled(target);
    // It shouldn't deopt.
    assertEquals(42L, target.call(42L));
    assertCompiled(target);
    // Reparsing with tags should invalidate the code, but it should recompile.
    // Expected count: 1 enter + (n+1) condition + n loop body + 1 return = 2n + 3
    Counter c = attachCounter(StatementTag.class);
    assertNotCompiled(target);
    target.resetCompilationProfile();
    assertEquals(5L, target.call(5L));
    assertEquals(13, c.get()); // 2*5 + 3
    assertNotCompiled(target);
    target.compile(true);
    assertCompiled(target);
    // It shouldn't deopt.
    c.clear();
    assertEquals(11L, target.call(11L));
    assertEquals(25, c.get()); // 2*11 + 3
    assertCompiled(target);
    // Attaching a second binding with different tags should invalidate the code again.
    Counter c2 = attachCounter(RootTag.class);
    assertNotCompiled(target);
    c.clear();
    assertEquals(5L, target.call(5L));
    assertEquals(13, c.get());
    assertEquals(1, c2.get()); // one root enter
    assertNotCompiled(target);
    target.compile(true);
    assertCompiled(target);
    // It shouldn't deopt.
    c.clear();
    c2.clear();
    assertEquals(20L, target.call(20L));
    assertEquals(43, c.get()); // 2*20 + 3
    assertEquals(1, c2.get());
    assertCompiled(target);
}
@TruffleInstrument.Registration(id = BytecodeDSLCompilationTestInstrumentation.ID, services = Instrumenter.class)
public static class BytecodeDSLCompilationTestInstrumentation extends TruffleInstrument {
    // Helper instrument whose only purpose is to expose the Instrumenter as a
    // service so the test can attach execution event bindings.
    public static final String ID = "bytecode_CompilationTestInstrument";

    @Override
    protected void onCreate(Env env) {
        env.registerService(env.getInstrumenter());
    }
}
/** Minimal mutable event counter used by the tag instrumentation tests. */
private static final class Counter {

    private int count;

    /** Returns the current count. */
    public int get() {
        return count;
    }

    /** Increments the count by one. */
    public void inc() {
        count = count + 1;
    }

    /** Resets the count to zero. */
    public void clear() {
        count = 0;
    }
}
private Counter attachCounter(Class<?>... tags) {
Counter c = new Counter();
instrumenter.attachExecutionEventFactory(SourceSectionFilter.newBuilder().tagIs(tags).build(), (_) -> {
return new ExecutionEventNode() {
@Override
public void onEnter(VirtualFrame f) {
c.inc();
}
};
});
return c;
}
/**
 * Parses a root node and forces it straight into the cached (tier 1) interpreter by
 * zeroing the uncached threshold, so compilation tests skip the uncached tier.
 */
private static <T extends BasicInterpreterBuilder> BasicInterpreter parseNodeForCompilation(Class<? extends BasicInterpreter> interpreterClass, String rootName, BytecodeParser<T> builder) {
    BasicInterpreter node = parseNode(interpreterClass, BytecodeDSLTestLanguage.REF.get(null), false, rootName, builder);
    node.getBytecodeNode().setUncachedThreshold(0);
    return node;
}
}
|
googleads/google-ads-java | 36,014 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/CustomerLabel.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/resources/customer_label.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.resources;
/**
* <pre>
* Represents a relationship between a customer and a label. This customer may
* not have access to all the labels attached to it. Additional CustomerLabels
* may be returned by increasing permissions with login-customer-id.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.resources.CustomerLabel}
*/
public final class CustomerLabel extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.resources.CustomerLabel)
CustomerLabelOrBuilder {
private static final long serialVersionUID = 0L;
// Use CustomerLabel.newBuilder() to construct.
private CustomerLabel(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CustomerLabel() {
resourceName_ = "";
customer_ = "";
label_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CustomerLabel();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.resources.CustomerLabelProto.internal_static_google_ads_googleads_v19_resources_CustomerLabel_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.resources.CustomerLabelProto.internal_static_google_ads_googleads_v19_resources_CustomerLabel_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.resources.CustomerLabel.class, com.google.ads.googleads.v19.resources.CustomerLabel.Builder.class);
}
private int bitField0_;
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resourceName_ = "";
/**
* <pre>
* Immutable. Name of the resource.
* Customer label resource names have the form:
* `customers/{customer_id}/customerLabels/{label_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
}
}
/**
* <pre>
* Immutable. Name of the resource.
* Customer label resource names have the form:
* `customers/{customer_id}/customerLabels/{label_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CUSTOMER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object customer_ = "";
/**
* <pre>
* Output only. The resource name of the customer to which the label is
* attached. Read only.
* </pre>
*
* <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return Whether the customer field is set.
*/
@java.lang.Override
public boolean hasCustomer() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Output only. The resource name of the customer to which the label is
* attached. Read only.
* </pre>
*
* <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The customer.
*/
@java.lang.Override
public java.lang.String getCustomer() {
java.lang.Object ref = customer_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customer_ = s;
return s;
}
}
/**
* <pre>
* Output only. The resource name of the customer to which the label is
* attached. Read only.
* </pre>
*
* <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for customer.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerBytes() {
java.lang.Object ref = customer_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customer_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LABEL_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object label_ = "";
/**
* <pre>
* Output only. The resource name of the label assigned to the customer.
*
* Note: the Customer ID portion of the label resource name is not
* validated when creating a new CustomerLabel.
* </pre>
*
* <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return Whether the label field is set.
*/
@java.lang.Override
public boolean hasLabel() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Output only. The resource name of the label assigned to the customer.
*
* Note: the Customer ID portion of the label resource name is not
* validated when creating a new CustomerLabel.
* </pre>
*
* <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The label.
*/
@java.lang.Override
public java.lang.String getLabel() {
java.lang.Object ref = label_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
label_ = s;
return s;
}
}
/**
* <pre>
* Output only. The resource name of the label assigned to the customer.
*
* Note: the Customer ID portion of the label resource name is not
* validated when creating a new CustomerLabel.
* </pre>
*
* <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for label.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getLabelBytes() {
java.lang.Object ref = label_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
label_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Field 1 (resource_name) has no presence bit: serialized only when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    // Fields 4 (customer) and 5 (label) are `optional`: serialized whenever
    // their presence bit is set, even if the value is the empty string.
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, customer_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, label_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Cached after first computation; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Must mirror writeTo(): count exactly the fields writeTo() emits.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, customer_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, label_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.resources.CustomerLabel)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.resources.CustomerLabel other = (com.google.ads.googleads.v19.resources.CustomerLabel) obj;
    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    // For optional fields, presence must match first; values are compared
    // only when both messages have the field set.
    if (hasCustomer() != other.hasCustomer()) return false;
    if (hasCustomer()) {
      if (!getCustomer()
          .equals(other.getCustomer())) return false;
    }
    if (hasLabel() != other.hasLabel()) return false;
    if (hasLabel()) {
      if (!getLabel()
          .equals(other.getLabel())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode == 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    // Optional fields contribute to the hash only when present, keeping
    // hashCode() consistent with equals().
    if (hasCustomer()) {
      hash = (37 * hash) + CUSTOMER_FIELD_NUMBER;
      hash = (53 * hash) + getCustomer().hashCode();
    }
    if (hasLabel()) {
      hash = (37 * hash) + LABEL_FIELD_NUMBER;
      hash = (53 * hash) + getLabel().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protoc-generated parse entry points. The data/ByteString/byte[]
  // overloads delegate directly to PARSER; the stream overloads go through
  // GeneratedMessageV3 helpers that translate protobuf parse failures into
  // InvalidProtocolBufferException while propagating plain IOExceptions.
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.resources.CustomerLabel prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless copy when this is the (immutable, empty) default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Represents a relationship between a customer and a label. This customer may
   * not have access to all the labels attached to it. Additional CustomerLabels
   * may be returned by increasing permissions with login-customer-id.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.resources.CustomerLabel}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.resources.CustomerLabel)
      com.google.ads.googleads.v19.resources.CustomerLabelOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.resources.CustomerLabelProto.internal_static_google_ads_googleads_v19_resources_CustomerLabel_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.resources.CustomerLabelProto.internal_static_google_ads_googleads_v19_resources_CustomerLabel_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.resources.CustomerLabel.class, com.google.ads.googleads.v19.resources.CustomerLabel.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.resources.CustomerLabel.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      resourceName_ = "";
      customer_ = "";
      label_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.resources.CustomerLabelProto.internal_static_google_ads_googleads_v19_resources_CustomerLabel_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.resources.CustomerLabel getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.resources.CustomerLabel.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.resources.CustomerLabel build() {
      com.google.ads.googleads.v19.resources.CustomerLabel result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.resources.CustomerLabel buildPartial() {
      com.google.ads.googleads.v19.resources.CustomerLabel result = new com.google.ads.googleads.v19.resources.CustomerLabel(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies set fields from the builder into the message, remapping builder
    // presence bits (0x1 resource_name, 0x2 customer, 0x4 label) onto the
    // message's bits (0x1 customer, 0x2 label); resource_name has no
    // presence bit in the message.
    private void buildPartial0(com.google.ads.googleads.v19.resources.CustomerLabel result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resourceName_ = resourceName_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.customer_ = customer_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.label_ = label_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.resources.CustomerLabel) {
        return mergeFrom((com.google.ads.googleads.v19.resources.CustomerLabel)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merge semantics: non-default scalar fields of `other` overwrite this
    // builder's values; unset fields in `other` are left untouched.
    public Builder mergeFrom(com.google.ads.googleads.v19.resources.CustomerLabel other) {
      if (other == com.google.ads.googleads.v19.resources.CustomerLabel.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasCustomer()) {
        customer_ = other.customer_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasLabel()) {
        label_ = other.label_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: (field_number << 3) | wire_type; wire type 2 = length-delimited,
          // so 10 -> field 1, 34 -> field 4, 42 -> field 5.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              resourceName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 34: {
              customer_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 34
            case 42: {
              label_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object customer_ = "";
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return Whether the customer field is set.
     */
    public boolean hasCustomer() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The customer.
     */
    public java.lang.String getCustomer() {
      java.lang.Object ref = customer_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        customer_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for customer.
     */
    public com.google.protobuf.ByteString
        getCustomerBytes() {
      java.lang.Object ref = customer_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        customer_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The customer to set.
     * @return This builder for chaining.
     */
    public Builder setCustomer(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      customer_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearCustomer() {
      customer_ = getDefaultInstance().getCustomer();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for customer to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      customer_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object label_ = "";
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return Whether the label field is set.
     */
    public boolean hasLabel() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The label.
     */
    public java.lang.String getLabel() {
      java.lang.Object ref = label_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        label_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for label.
     */
    public com.google.protobuf.ByteString
        getLabelBytes() {
      java.lang.Object ref = label_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        label_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The label to set.
     * @return This builder for chaining.
     */
    public Builder setLabel(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      label_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearLabel() {
      label_ = getDefaultInstance().getLabel();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for label to set.
     * @return This builder for chaining.
     */
    public Builder setLabelBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      label_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.resources.CustomerLabel)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.resources.CustomerLabel)
  // Singleton empty instance shared by all callers of getDefaultInstance().
  private static final com.google.ads.googleads.v19.resources.CustomerLabel DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.resources.CustomerLabel();
  }
  public static com.google.ads.googleads.v19.resources.CustomerLabel getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<CustomerLabel>
      PARSER = new com.google.protobuf.AbstractParser<CustomerLabel>() {
    @java.lang.Override
    public CustomerLabel parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<CustomerLabel> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CustomerLabel> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.resources.CustomerLabel getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 36,014 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/CustomerLabel.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/resources/customer_label.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.resources;
/**
* <pre>
* Represents a relationship between a customer and a label. This customer may
* not have access to all the labels attached to it. Additional CustomerLabels
* may be returned by increasing permissions with login-customer-id.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.resources.CustomerLabel}
*/
public final class CustomerLabel extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.resources.CustomerLabel)
CustomerLabelOrBuilder {
private static final long serialVersionUID = 0L;
// Use CustomerLabel.newBuilder() to construct.
private CustomerLabel(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CustomerLabel() {
resourceName_ = "";
customer_ = "";
label_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CustomerLabel();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.resources.CustomerLabelProto.internal_static_google_ads_googleads_v20_resources_CustomerLabel_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.resources.CustomerLabelProto.internal_static_google_ads_googleads_v20_resources_CustomerLabel_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.resources.CustomerLabel.class, com.google.ads.googleads.v20.resources.CustomerLabel.Builder.class);
}
private int bitField0_;
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resourceName_ = "";
/**
* <pre>
* Immutable. Name of the resource.
* Customer label resource names have the form:
* `customers/{customer_id}/customerLabels/{label_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
}
}
/**
* <pre>
* Immutable. Name of the resource.
* Customer label resource names have the form:
* `customers/{customer_id}/customerLabels/{label_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CUSTOMER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object customer_ = "";
/**
* <pre>
* Output only. The resource name of the customer to which the label is
* attached. Read only.
* </pre>
*
* <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return Whether the customer field is set.
*/
@java.lang.Override
public boolean hasCustomer() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Output only. The resource name of the customer to which the label is
* attached. Read only.
* </pre>
*
* <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The customer.
*/
@java.lang.Override
public java.lang.String getCustomer() {
java.lang.Object ref = customer_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customer_ = s;
return s;
}
}
/**
* <pre>
* Output only. The resource name of the customer to which the label is
* attached. Read only.
* </pre>
*
* <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for customer.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerBytes() {
java.lang.Object ref = customer_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customer_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LABEL_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object label_ = "";
/**
* <pre>
* Output only. The resource name of the label assigned to the customer.
*
* Note: the Customer ID portion of the label resource name is not
* validated when creating a new CustomerLabel.
* </pre>
*
* <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return Whether the label field is set.
*/
@java.lang.Override
public boolean hasLabel() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Output only. The resource name of the label assigned to the customer.
*
* Note: the Customer ID portion of the label resource name is not
* validated when creating a new CustomerLabel.
* </pre>
*
* <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The label.
*/
@java.lang.Override
public java.lang.String getLabel() {
java.lang.Object ref = label_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
label_ = s;
return s;
}
}
/**
* <pre>
* Output only. The resource name of the label assigned to the customer.
*
* Note: the Customer ID portion of the label resource name is not
* validated when creating a new CustomerLabel.
* </pre>
*
* <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for label.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getLabelBytes() {
java.lang.Object ref = label_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
label_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized result of isInitialized(): -1 = not yet computed,
  // 1 = initialized, 0 = known-uninitialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in ascending field-number order:
  // resource_name (1) is a proto3 implicit-presence string (skipped when
  // empty); customer (4) and label (5) have explicit presence gated on
  // bitField0_ bits 0x1 and 0x2 respectively. Unknown fields are appended.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, customer_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, label_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size with the same field/presence logic as
  // writeTo, memoizing the result in memoizedSize (-1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, customer_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, label_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over resource_name, the presence-and-value of the two
  // optional fields, and the unknown-field set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.resources.CustomerLabel)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.resources.CustomerLabel other = (com.google.ads.googleads.v20.resources.CustomerLabel) obj;
    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (hasCustomer() != other.hasCustomer()) return false;
    if (hasCustomer()) {
      if (!getCustomer()
          .equals(other.getCustomer())) return false;
    }
    if (hasLabel() != other.hasLabel()) return false;
    if (hasLabel()) {
      if (!getLabel()
          .equals(other.getLabel())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(): mixes the descriptor, each present
  // field's number and value, and the unknown fields; memoized in
  // memoizedHashCode (0 = not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    if (hasCustomer()) {
      hash = (37 * hash) + CUSTOMER_FIELD_NUMBER;
      hash = (53 * hash) + getCustomer().hashCode();
    }
    if (hasLabel()) {
      hash = (37 * hash) + LABEL_FIELD_NUMBER;
      hash = (53 * hash) + getLabel().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom / parseDelimitedFrom overloads: one pair
  // (with and without an ExtensionRegistryLite) per input source type. The
  // in-memory overloads delegate directly to PARSER; the stream overloads go
  // through the GeneratedMessageV3 helpers, which convert IOExceptions into
  // InvalidProtocolBufferException unwrapping as appropriate.
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the body.
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods. newBuilder() always starts from the (immutable)
  // default instance; toBuilder() avoids a redundant mergeFrom when called on
  // the default instance itself.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v20.resources.CustomerLabel prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Represents a relationship between a customer and a label. This customer may
   * not have access to all the labels attached to it. Additional CustomerLabels
   * may be returned by increasing permissions with login-customer-id.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.resources.CustomerLabel}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.resources.CustomerLabel)
      com.google.ads.googleads.v20.resources.CustomerLabelOrBuilder {
    // NOTE(review): generated builder. Presence is tracked in the builder's
    // own bitField0_ (0x1 resourceName, 0x2 customer, 0x4 label); these are
    // remapped to the message's bits (0x1 customer, 0x2 label) by
    // buildPartial0 below.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.resources.CustomerLabelProto.internal_static_google_ads_googleads_v20_resources_CustomerLabel_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.resources.CustomerLabelProto.internal_static_google_ads_googleads_v20_resources_CustomerLabel_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.resources.CustomerLabel.class, com.google.ads.googleads.v20.resources.CustomerLabel.Builder.class);
    }
    // Construct using com.google.ads.googleads.v20.resources.CustomerLabel.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      resourceName_ = "";
      customer_ = "";
      label_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.resources.CustomerLabelProto.internal_static_google_ads_googleads_v20_resources_CustomerLabel_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.resources.CustomerLabel getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.resources.CustomerLabel.getDefaultInstance();
    }
    // build() enforces initialization; with no required fields this never
    // actually throws (isInitialized() is constant-true).
    @java.lang.Override
    public com.google.ads.googleads.v20.resources.CustomerLabel build() {
      com.google.ads.googleads.v20.resources.CustomerLabel result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.resources.CustomerLabel buildPartial() {
      com.google.ads.googleads.v20.resources.CustomerLabel result = new com.google.ads.googleads.v20.resources.CustomerLabel(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies set builder fields into `result`, translating builder presence
    // bits to message presence bits. resource_name (builder bit 0x1) has no
    // message-side bit because it is a proto3 implicit-presence string;
    // customer (0x2 -> 0x1) and label (0x4 -> 0x2) keep explicit presence.
    private void buildPartial0(com.google.ads.googleads.v20.resources.CustomerLabel result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resourceName_ = resourceName_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.customer_ = customer_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.label_ = label_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.resources.CustomerLabel) {
        return mergeFrom((com.google.ads.googleads.v20.resources.CustomerLabel)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: a non-empty resourceName and any set optional field
    // of `other` overwrite this builder's current values; unknown fields are
    // merged as well.
    public Builder mergeFrom(com.google.ads.googleads.v20.resources.CustomerLabel other) {
      if (other == com.google.ads.googleads.v20.resources.CustomerLabel.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasCustomer()) {
        customer_ = other.customer_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasLabel()) {
        label_ = other.label_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse. Tags 10/34/42 are the length-delimited wire-type tags
    // for fields 1 (resource_name), 4 (customer) and 5 (label); strings are
    // required to be valid UTF-8. Unrecognized fields are preserved via
    // parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              resourceName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 34: {
              customer_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 34
            case 42: {
              label_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // Builder-side storage mirrors the message: Object slots that hold either
    // a String or its ByteString form (see the lazy-caching getters below).
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object customer_ = "";
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return Whether the customer field is set.
     */
    public boolean hasCustomer() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The customer.
     */
    public java.lang.String getCustomer() {
      java.lang.Object ref = customer_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        customer_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for customer.
     */
    public com.google.protobuf.ByteString
        getCustomerBytes() {
      java.lang.Object ref = customer_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        customer_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The customer to set.
     * @return This builder for chaining.
     */
    public Builder setCustomer(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      customer_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearCustomer() {
      customer_ = getDefaultInstance().getCustomer();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for customer to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      customer_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object label_ = "";
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return Whether the label field is set.
     */
    public boolean hasLabel() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The label.
     */
    public java.lang.String getLabel() {
      java.lang.Object ref = label_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        label_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for label.
     */
    public com.google.protobuf.ByteString
        getLabelBytes() {
      java.lang.Object ref = label_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        label_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The label to set.
     * @return This builder for chaining.
     */
    public Builder setLabel(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      label_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearLabel() {
      label_ = getDefaultInstance().getLabel();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for label to set.
     * @return This builder for chaining.
     */
    public Builder setLabelBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      label_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.resources.CustomerLabel)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.resources.CustomerLabel)
  // Shared immutable default instance; also the prototype all builders start
  // from (see newBuilder()).
  private static final com.google.ads.googleads.v20.resources.CustomerLabel DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.resources.CustomerLabel();
  }
  public static com.google.ads.googleads.v20.resources.CustomerLabel getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom(CodedInputStream, ...) and attaches
  // the partially built message to any parse exception so callers can
  // recover what was read before the failure.
  private static final com.google.protobuf.Parser<CustomerLabel>
      PARSER = new com.google.protobuf.AbstractParser<CustomerLabel>() {
    @java.lang.Override
    public CustomerLabel parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<CustomerLabel> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CustomerLabel> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v20.resources.CustomerLabel getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 36,014 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/resources/CustomerLabel.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/resources/customer_label.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.resources;
/**
* <pre>
* Represents a relationship between a customer and a label. This customer may
* not have access to all the labels attached to it. Additional CustomerLabels
* may be returned by increasing permissions with login-customer-id.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.resources.CustomerLabel}
*/
public final class CustomerLabel extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.resources.CustomerLabel)
CustomerLabelOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CustomerLabel.newBuilder() to construct.
  private CustomerLabel(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor: all three string fields start as the empty string.
  private CustomerLabel() {
    resourceName_ = "";
    customer_ = "";
    label_ = "";
  }
  // Reflection hook used by the protobuf runtime to allocate fresh instances
  // without going through a builder.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CustomerLabel();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.resources.CustomerLabelProto.internal_static_google_ads_googleads_v21_resources_CustomerLabel_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.resources.CustomerLabelProto.internal_static_google_ads_googleads_v21_resources_CustomerLabel_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.resources.CustomerLabel.class, com.google.ads.googleads.v21.resources.CustomerLabel.Builder.class);
  }
  // Presence bits for the explicit-presence optional fields:
  // 0x1 = customer (field 4), 0x2 = label (field 5). resource_name (field 1)
  // is proto3 implicit-presence and has no bit.
  private int bitField0_;
  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  // Holds either a String or its serialized ByteString; the getters below
  // lazily convert and memoize (protobuf's standard benign-race pattern —
  // each getter reads the field exactly once into a local).
  @SuppressWarnings("serial")
  private volatile java.lang.Object resourceName_ = "";
  /**
   * <pre>
   * Immutable. Name of the resource.
   * Customer label resource names have the form:
   * `customers/{customer_id}/customerLabels/{label_id}`
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resourceName_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Immutable. Name of the resource.
   * Customer label resource names have the form:
   * `customers/{customer_id}/customerLabels/{label_id}`
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int CUSTOMER_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  private volatile java.lang.Object customer_ = "";
  /**
   * <pre>
   * Output only. The resource name of the customer to which the label is
   * attached. Read only.
   * </pre>
   *
   * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return Whether the customer field is set.
   */
  @java.lang.Override
  public boolean hasCustomer() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * Output only. The resource name of the customer to which the label is
   * attached. Read only.
   * </pre>
   *
   * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return The customer.
   */
  @java.lang.Override
  public java.lang.String getCustomer() {
    java.lang.Object ref = customer_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      customer_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Output only. The resource name of the customer to which the label is
   * attached. Read only.
   * </pre>
   *
   * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for customer.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCustomerBytes() {
    java.lang.Object ref = customer_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      customer_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LABEL_FIELD_NUMBER = 5;
  @SuppressWarnings("serial")
  private volatile java.lang.Object label_ = "";
  /**
   * <pre>
   * Output only. The resource name of the label assigned to the customer.
   *
   * Note: the Customer ID portion of the label resource name is not
   * validated when creating a new CustomerLabel.
   * </pre>
   *
   * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return Whether the label field is set.
   */
  @java.lang.Override
  public boolean hasLabel() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <pre>
   * Output only. The resource name of the label assigned to the customer.
   *
   * Note: the Customer ID portion of the label resource name is not
   * validated when creating a new CustomerLabel.
   * </pre>
   *
   * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return The label.
   */
  @java.lang.Override
  public java.lang.String getLabel() {
    java.lang.Object ref = label_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      label_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Output only. The resource name of the label assigned to the customer.
   *
   * Note: the Customer ID portion of the label resource name is not
   * validated when creating a new CustomerLabel.
   * </pre>
   *
   * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for label.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getLabelBytes() {
    java.lang.Object ref = label_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      label_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
}
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, customer_);
}
if (((bitField0_ & 0x00000002) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, label_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, customer_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, label_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
  // Value equality: compares resource_name directly, the optional fields
  // presence-first (hasX() must match before values are compared), and
  // finally the unknown-field sets.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.resources.CustomerLabel)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.resources.CustomerLabel other = (com.google.ads.googleads.v21.resources.CustomerLabel) obj;
    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (hasCustomer() != other.hasCustomer()) return false;
    if (hasCustomer()) {
      if (!getCustomer()
          .equals(other.getCustomer())) return false;
    }
    if (hasLabel() != other.hasLabel()) return false;
    if (hasLabel()) {
      if (!getLabel()
          .equals(other.getLabel())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mirrors equals(): descriptor, resource_name, then each optional
  // field only when present, then unknown fields. Result is memoized
  // (0 = not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    if (hasCustomer()) {
      hash = (37 * hash) + CUSTOMER_FIELD_NUMBER;
      hash = (53 * hash) + getCustomer().hashCode();
    }
    if (hasLabel()) {
      hash = (37 * hash) + LABEL_FIELD_NUMBER;
      hash = (53 * hash) + getLabel().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads delegate to the
  // shared PARSER; stream-based variants go through GeneratedMessageV3's
  // IO-exception-translating helpers. `parseDelimitedFrom` expects a
  // varint length prefix before the message bytes.
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factories. A fresh builder is obtained via the default
  // instance's toBuilder(); toBuilder() on the default instance itself
  // skips the (no-op) merge for efficiency.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.resources.CustomerLabel prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Represents a relationship between a customer and a label. This customer may
   * not have access to all the labels attached to it. Additional CustomerLabels
   * may be returned by increasing permissions with login-customer-id.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.resources.CustomerLabel}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.resources.CustomerLabel)
      com.google.ads.googleads.v21.resources.CustomerLabelOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.resources.CustomerLabelProto.internal_static_google_ads_googleads_v21_resources_CustomerLabel_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.resources.CustomerLabelProto.internal_static_google_ads_googleads_v21_resources_CustomerLabel_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.resources.CustomerLabel.class, com.google.ads.googleads.v21.resources.CustomerLabel.Builder.class);
    }
    // Construct using com.google.ads.googleads.v21.resources.CustomerLabel.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      resourceName_ = "";
      customer_ = "";
      label_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.resources.CustomerLabelProto.internal_static_google_ads_googleads_v21_resources_CustomerLabel_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerLabel getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.resources.CustomerLabel.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerLabel build() {
      com.google.ads.googleads.v21.resources.CustomerLabel result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerLabel buildPartial() {
      com.google.ads.googleads.v21.resources.CustomerLabel result = new com.google.ads.googleads.v21.resources.CustomerLabel(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies set fields into `result`, translating the builder's presence
    // bits (0x1 resource_name, 0x2 customer, 0x4 label) into the message's
    // presence bits (0x1 customer, 0x2 label; resource_name has no bit in
    // the message because it uses implicit presence).
    private void buildPartial0(com.google.ads.googleads.v21.resources.CustomerLabel result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resourceName_ = resourceName_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.customer_ = customer_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.label_ = label_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.resources.CustomerLabel) {
        return mergeFrom((com.google.ads.googleads.v21.resources.CustomerLabel)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: set fields of `other` overwrite this builder's
    // values; unset fields are left untouched.
    public Builder mergeFrom(com.google.ads.googleads.v21.resources.CustomerLabel other) {
      if (other == com.google.ads.googleads.v21.resources.CustomerLabel.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasCustomer()) {
        customer_ = other.customer_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasLabel()) {
        label_ = other.label_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming wire-format merge. Tags: 10 = field 1 (resource_name),
    // 34 = field 4 (customer), 42 = field 5 (label); all length-delimited
    // UTF-8 strings. Unrecognized tags go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              resourceName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 34: {
              customer_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 34
            case 42: {
              label_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. Name of the resource.
     * Customer label resource names have the form:
     * `customers/{customer_id}/customerLabels/{label_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object customer_ = "";
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return Whether the customer field is set.
     */
    public boolean hasCustomer() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The customer.
     */
    public java.lang.String getCustomer() {
      java.lang.Object ref = customer_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        customer_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for customer.
     */
    public com.google.protobuf.ByteString
        getCustomerBytes() {
      java.lang.Object ref = customer_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        customer_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The customer to set.
     * @return This builder for chaining.
     */
    public Builder setCustomer(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      customer_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearCustomer() {
      customer_ = getDefaultInstance().getCustomer();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the customer to which the label is
     * attached. Read only.
     * </pre>
     *
     * <code>optional string customer = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for customer to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      customer_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object label_ = "";
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return Whether the label field is set.
     */
    public boolean hasLabel() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The label.
     */
    public java.lang.String getLabel() {
      java.lang.Object ref = label_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        label_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for label.
     */
    public com.google.protobuf.ByteString
        getLabelBytes() {
      java.lang.Object ref = label_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        label_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The label to set.
     * @return This builder for chaining.
     */
    public Builder setLabel(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      label_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearLabel() {
      label_ = getDefaultInstance().getLabel();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the label assigned to the customer.
     *
     * Note: the Customer ID portion of the label resource name is not
     * validated when creating a new CustomerLabel.
     * </pre>
     *
     * <code>optional string label = 5 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for label to set.
     * @return This builder for chaining.
     */
    public Builder setLabelBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      label_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.resources.CustomerLabel)
  }
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.resources.CustomerLabel)
  // Singleton default (all-fields-unset) instance, created eagerly at class
  // load; shared by newBuilder() and getDefaultInstanceForType().
  private static final com.google.ads.googleads.v21.resources.CustomerLabel DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.resources.CustomerLabel();
  }
  public static com.google.ads.googleads.v21.resources.CustomerLabel getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser backing every parseFrom() overload. Delegates to the
  // builder's mergeFrom(); on failure the partially-built message is
  // attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CustomerLabel>
      PARSER = new com.google.protobuf.AbstractParser<CustomerLabel>() {
    @java.lang.Override
    public CustomerLabel parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  // Parser accessors; both the static and the per-instance variant return
  // the shared PARSER singleton.
  public static com.google.protobuf.Parser<CustomerLabel> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CustomerLabel> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.CustomerLabel getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,903 | java-container/proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/GetServerConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
/**
*
*
* <pre>
* Gets the current Kubernetes Engine service configuration.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.GetServerConfigRequest}
*/
public final class GetServerConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1beta1.GetServerConfigRequest)
GetServerConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use GetServerConfigRequest.newBuilder() to construct.
  private GetServerConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes all three string fields to "" (proto3
  // string default). Used only for DEFAULT_INSTANCE / newInstance().
  private GetServerConfigRequest() {
    projectId_ = "";
    zone_ = "";
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetServerConfigRequest();
  }
  // Reflection support: descriptor and field-accessor table are owned by the
  // file-level ClusterServiceProto outer class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_GetServerConfigRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_GetServerConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1beta1.GetServerConfigRequest.class,
            com.google.container.v1beta1.GetServerConfigRequest.Builder.class);
  }
  public static final int PROJECT_ID_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; each accessor lazily converts to
  // its preferred representation and caches it back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object projectId_ = "";

  /**
   *
   *
   * <pre>
   * Deprecated. The Google Developers Console [project ID or project
   * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
   * This field has been deprecated and replaced by the name field.
   * </pre>
   *
   * <code>string project_id = 1 [deprecated = true];</code>
   *
   * @deprecated google.container.v1beta1.GetServerConfigRequest.project_id is deprecated. See
   *     google/container/v1beta1/cluster_service.proto;l=4580
   * @return The projectId.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public java.lang.String getProjectId() {
    java.lang.Object ref = projectId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      projectId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Deprecated. The Google Developers Console [project ID or project
   * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
   * This field has been deprecated and replaced by the name field.
   * </pre>
   *
   * <code>string project_id = 1 [deprecated = true];</code>
   *
   * @deprecated google.container.v1beta1.GetServerConfigRequest.project_id is deprecated. See
   *     google/container/v1beta1/cluster_service.proto;l=4580
   * @return The bytes for projectId.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public com.google.protobuf.ByteString getProjectIdBytes() {
    java.lang.Object ref = projectId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      projectId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ZONE_FIELD_NUMBER = 2;

  // Same lazy String/ByteString caching scheme as projectId_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object zone_ = "";

  /**
   *
   *
   * <pre>
   * Deprecated. The name of the Google Compute Engine
   * [zone](https://cloud.google.com/compute/docs/zones#available)
   * to return operations for. This field has been deprecated and replaced by
   * the name field.
   * </pre>
   *
   * <code>string zone = 2 [deprecated = true];</code>
   *
   * @deprecated google.container.v1beta1.GetServerConfigRequest.zone is deprecated. See
   *     google/container/v1beta1/cluster_service.proto;l=4586
   * @return The zone.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public java.lang.String getZone() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      zone_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Deprecated. The name of the Google Compute Engine
   * [zone](https://cloud.google.com/compute/docs/zones#available)
   * to return operations for. This field has been deprecated and replaced by
   * the name field.
   * </pre>
   *
   * <code>string zone = 2 [deprecated = true];</code>
   *
   * @deprecated google.container.v1beta1.GetServerConfigRequest.zone is deprecated. See
   *     google/container/v1beta1/cluster_service.proto;l=4586
   * @return The bytes for zone.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public com.google.protobuf.ByteString getZoneBytes() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      zone_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int NAME_FIELD_NUMBER = 4;

  // Same lazy String/ByteString caching scheme as projectId_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";

  /**
   *
   *
   * <pre>
   * The name (project and location) of the server config to get,
   * specified in the format `projects/&#42;/locations/&#42;`.
   * </pre>
   *
   * <code>string name = 4;</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The name (project and location) of the server config to get,
   * specified in the format `projects/&#42;/locations/&#42;`.
   * </pre>
   *
   * <code>string name = 4;</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Tri-state cache (-1 unknown / 0 false / 1 true); no required fields, so
  // the message is always initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes in field-number order (1, 2, 4). All three are proto3
  // implicit-presence strings, written only when non-empty; unknown fields
  // are re-emitted last.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, name_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized size with the same non-empty conditions as
  // writeTo(); memoized in memoizedSize (-1 = not computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, name_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all three string fields plus unknown fields; no
  // presence checks because every field uses implicit presence.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1beta1.GetServerConfigRequest)) {
      return super.equals(obj);
    }
    com.google.container.v1beta1.GetServerConfigRequest other =
        (com.google.container.v1beta1.GetServerConfigRequest) obj;
    if (!getProjectId().equals(other.getProjectId())) return false;
    if (!getZone().equals(other.getZone())) return false;
    if (!getName().equals(other.getName())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mirrors equals(): descriptor, then each field mixed with its field
  // number, then unknown fields. Memoized (0 = not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    hash = (37 * hash) + ZONE_FIELD_NUMBER;
    hash = (53 * hash) + getZone().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom / parseDelimitedFrom overload family.
  // Byte-based overloads delegate to PARSER directly; stream-based overloads go
  // through GeneratedMessageV3 helpers so IOExceptions are surfaced unchanged.
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.container.v1beta1.GetServerConfigRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.GetServerConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. toBuilder() avoids an unnecessary mergeFrom when
  // called on the shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.container.v1beta1.GetServerConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Gets the current Kubernetes Engine service configuration.
   * </pre>
   *
   * Protobuf type {@code google.container.v1beta1.GetServerConfigRequest}
   */
  // NOTE(review): generated builder ("DO NOT EDIT"). Field presence is tracked
  // in bitField0_: 0x1 = project_id, 0x2 = zone, 0x4 = name. String fields use
  // the lazy String/ByteString dual-representation caching idiom throughout.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1beta1.GetServerConfigRequest)
      com.google.container.v1beta1.GetServerConfigRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_GetServerConfigRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_GetServerConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1beta1.GetServerConfigRequest.class,
              com.google.container.v1beta1.GetServerConfigRequest.Builder.class);
    }
    // Construct using com.google.container.v1beta1.GetServerConfigRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Resets all presence bits and all fields to their proto3 defaults.
      bitField0_ = 0;
      projectId_ = "";
      zone_ = "";
      name_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_GetServerConfigRequest_descriptor;
    }
    @java.lang.Override
    public com.google.container.v1beta1.GetServerConfigRequest getDefaultInstanceForType() {
      return com.google.container.v1beta1.GetServerConfigRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.container.v1beta1.GetServerConfigRequest build() {
      com.google.container.v1beta1.GetServerConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.container.v1beta1.GetServerConfigRequest buildPartial() {
      com.google.container.v1beta1.GetServerConfigRequest result =
          new com.google.container.v1beta1.GetServerConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set into the new message.
    private void buildPartial0(com.google.container.v1beta1.GetServerConfigRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.projectId_ = projectId_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.zone_ = zone_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.name_ = name_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1beta1.GetServerConfigRequest) {
        return mergeFrom((com.google.container.v1beta1.GetServerConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merge semantics: only non-empty (set) string fields from `other` overwrite
    // the corresponding fields here; merging the default instance is a no-op.
    public Builder mergeFrom(com.google.container.v1beta1.GetServerConfigRequest other) {
      if (other == com.google.container.v1beta1.GetServerConfigRequest.getDefaultInstance())
        return this;
      if (!other.getProjectId().isEmpty()) {
        projectId_ = other.projectId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getZone().isEmpty()) {
        zone_ = other.zone_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      // Streaming tag-dispatch parse loop; unrecognized tags are preserved via
      // parseUnknownField, and an end-group tag terminates the loop.
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                projectId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                zone_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 34:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object projectId_ = "";
    /**
     *
     *
     * <pre>
     * Deprecated. The Google Developers Console [project ID or project
     * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
     * This field has been deprecated and replaced by the name field.
     * </pre>
     *
     * <code>string project_id = 1 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.project_id is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4580
     * @return The projectId.
     */
    @java.lang.Deprecated
    public java.lang.String getProjectId() {
      java.lang.Object ref = projectId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        projectId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The Google Developers Console [project ID or project
     * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
     * This field has been deprecated and replaced by the name field.
     * </pre>
     *
     * <code>string project_id = 1 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.project_id is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4580
     * @return The bytes for projectId.
     */
    @java.lang.Deprecated
    public com.google.protobuf.ByteString getProjectIdBytes() {
      java.lang.Object ref = projectId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        projectId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The Google Developers Console [project ID or project
     * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
     * This field has been deprecated and replaced by the name field.
     * </pre>
     *
     * <code>string project_id = 1 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.project_id is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4580
     * @param value The projectId to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setProjectId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      projectId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The Google Developers Console [project ID or project
     * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
     * This field has been deprecated and replaced by the name field.
     * </pre>
     *
     * <code>string project_id = 1 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.project_id is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4580
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearProjectId() {
      projectId_ = getDefaultInstance().getProjectId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The Google Developers Console [project ID or project
     * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
     * This field has been deprecated and replaced by the name field.
     * </pre>
     *
     * <code>string project_id = 1 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.project_id is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4580
     * @param value The bytes for projectId to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setProjectIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      projectId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object zone_ = "";
    /**
     *
     *
     * <pre>
     * Deprecated. The name of the Google Compute Engine
     * [zone](https://cloud.google.com/compute/docs/zones#available)
     * to return operations for. This field has been deprecated and replaced by
     * the name field.
     * </pre>
     *
     * <code>string zone = 2 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.zone is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4586
     * @return The zone.
     */
    @java.lang.Deprecated
    public java.lang.String getZone() {
      java.lang.Object ref = zone_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        zone_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The name of the Google Compute Engine
     * [zone](https://cloud.google.com/compute/docs/zones#available)
     * to return operations for. This field has been deprecated and replaced by
     * the name field.
     * </pre>
     *
     * <code>string zone = 2 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.zone is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4586
     * @return The bytes for zone.
     */
    @java.lang.Deprecated
    public com.google.protobuf.ByteString getZoneBytes() {
      java.lang.Object ref = zone_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        zone_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The name of the Google Compute Engine
     * [zone](https://cloud.google.com/compute/docs/zones#available)
     * to return operations for. This field has been deprecated and replaced by
     * the name field.
     * </pre>
     *
     * <code>string zone = 2 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.zone is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4586
     * @param value The zone to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setZone(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      zone_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The name of the Google Compute Engine
     * [zone](https://cloud.google.com/compute/docs/zones#available)
     * to return operations for. This field has been deprecated and replaced by
     * the name field.
     * </pre>
     *
     * <code>string zone = 2 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.zone is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4586
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearZone() {
      zone_ = getDefaultInstance().getZone();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated. The name of the Google Compute Engine
     * [zone](https://cloud.google.com/compute/docs/zones#available)
     * to return operations for. This field has been deprecated and replaced by
     * the name field.
     * </pre>
     *
     * <code>string zone = 2 [deprecated = true];</code>
     *
     * @deprecated google.container.v1beta1.GetServerConfigRequest.zone is deprecated. See
     *     google/container/v1beta1/cluster_service.proto;l=4586
     * @param value The bytes for zone to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setZoneBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      zone_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * The name (project and location) of the server config to get,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>string name = 4;</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name (project and location) of the server config to get,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>string name = 4;</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name (project and location) of the server config to get,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>string name = 4;</code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name (project and location) of the server config to get,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>string name = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name (project and location) of the server config to get,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>string name = 4;</code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.container.v1beta1.GetServerConfigRequest)
  }
  // @@protoc_insertion_point(class_scope:google.container.v1beta1.GetServerConfigRequest)
  // Shared immutable default instance, created once at class-load time.
  private static final com.google.container.v1beta1.GetServerConfigRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.container.v1beta1.GetServerConfigRequest();
  }
  public static com.google.container.v1beta1.GetServerConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Anonymous parser: delegates to Builder.mergeFrom and, on any failure,
  // attaches the partially-built message to the thrown
  // InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<GetServerConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetServerConfigRequest>() {
        @java.lang.Override
        public GetServerConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Accessors exposing the shared PARSER and default instance.
  public static com.google.protobuf.Parser<GetServerConfigRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GetServerConfigRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.container.v1beta1.GetServerConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,834 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/CometSpec.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Spec for Comet metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.CometSpec}
*/
public final class CometSpec extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.CometSpec)
CometSpecOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CometSpec.newBuilder() to construct.
  private CometSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes proto3 defaults; used for the default
  // instance and reflective instantiation.
  private CometSpec() {
    version_ = 0;
    sourceLanguage_ = "";
    targetLanguage_ = "";
  }
  // Called reflectively by the protobuf runtime; the parameter is unused by design.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CometSpec();
  }
  // Descriptor plumbing generated from evaluation_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_CometSpec_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_CometSpec_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.CometSpec.class,
            com.google.cloud.aiplatform.v1.CometSpec.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Comet version options.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.aiplatform.v1.CometSpec.CometVersion}
   */
  // NOTE(review): numeric value 1 is absent here -- presumably reserved or
  // removed in the .proto; the gap is intentional in generated code (confirm
  // against evaluation_service.proto).
  public enum CometVersion implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Comet version unspecified.
     * </pre>
     *
     * <code>COMET_VERSION_UNSPECIFIED = 0;</code>
     */
    COMET_VERSION_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Comet 22 for translation + source + reference
     * (source-reference-combined).
     * </pre>
     *
     * <code>COMET_22_SRC_REF = 2;</code>
     */
    COMET_22_SRC_REF(2),
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * Comet version unspecified.
     * </pre>
     *
     * <code>COMET_VERSION_UNSPECIFIED = 0;</code>
     */
    public static final int COMET_VERSION_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Comet 22 for translation + source + reference
     * (source-reference-combined).
     * </pre>
     *
     * <code>COMET_22_SRC_REF = 2;</code>
     */
    public static final int COMET_22_SRC_REF_VALUE = 2;
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static CometVersion valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or null if unknown.
     */
    public static CometVersion forNumber(int value) {
      switch (value) {
        case 0:
          return COMET_VERSION_UNSPECIFIED;
        case 2:
          return COMET_22_SRC_REF;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<CometVersion> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<CometVersion> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<CometVersion>() {
          public CometVersion findValueByNumber(int number) {
            return CometVersion.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.CometSpec.getDescriptor().getEnumTypes().get(0);
    }
    private static final CometVersion[] VALUES = values();
    public static CometVersion valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private CometVersion(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1.CometSpec.CometVersion)
  }
  // Presence bits for proto3 `optional` fields; bit 0x1 tracks `version`.
  private int bitField0_;
  public static final int VERSION_FIELD_NUMBER = 1;
  private int version_ = 0;
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the version field is set.
   */
  @java.lang.Override
  public boolean hasVersion() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The enum numeric value on the wire for version.
   */
  @java.lang.Override
  public int getVersionValue() {
    return version_;
  }
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The version, or UNRECOGNIZED if the wire value is unknown to this runtime.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.CometSpec.CometVersion getVersion() {
    com.google.cloud.aiplatform.v1.CometSpec.CometVersion result =
        com.google.cloud.aiplatform.v1.CometSpec.CometVersion.forNumber(version_);
    return result == null
        ? com.google.cloud.aiplatform.v1.CometSpec.CometVersion.UNRECOGNIZED
        : result;
  }
  public static final int SOURCE_LANGUAGE_FIELD_NUMBER = 2;
  // Lazy dual representation: holds either a String or a ByteString and caches
  // the converted form on first access (standard protoc idiom).
  @SuppressWarnings("serial")
  private volatile java.lang.Object sourceLanguage_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Source language in BCP-47 format.
   * </pre>
   *
   * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The sourceLanguage.
   */
  @java.lang.Override
  public java.lang.String getSourceLanguage() {
    java.lang.Object ref = sourceLanguage_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      sourceLanguage_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Source language in BCP-47 format.
   * </pre>
   *
   * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for sourceLanguage.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSourceLanguageBytes() {
    java.lang.Object ref = sourceLanguage_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      sourceLanguage_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TARGET_LANGUAGE_FIELD_NUMBER = 3;
  // Same lazy String/ByteString caching idiom as source_language above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object targetLanguage_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Target language in BCP-47 format. Covers both prediction and
   * reference.
   * </pre>
   *
   * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The targetLanguage.
   */
  @java.lang.Override
  public java.lang.String getTargetLanguage() {
    java.lang.Object ref = targetLanguage_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      targetLanguage_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Target language in BCP-47 format. Covers both prediction and
   * reference.
   * </pre>
   *
   * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for targetLanguage.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTargetLanguageBytes() {
    java.lang.Object ref = targetLanguage_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      targetLanguage_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2 required fields here, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // `version` is a proto3 optional enum: written only when its presence bit
    // (0x1) is set; strings follow proto3 non-empty omission.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeEnum(1, version_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceLanguage_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, targetLanguage_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 sentinel means not yet computed. Must mirror writeTo exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, version_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceLanguage_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, targetLanguage_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: version presence + value, both language strings, and
  // unknown fields must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.CometSpec)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.CometSpec other = (com.google.cloud.aiplatform.v1.CometSpec) obj;
    if (hasVersion() != other.hasVersion()) return false;
    if (hasVersion()) {
      if (version_ != other.version_) return false;
    }
    if (!getSourceLanguage().equals(other.getSourceLanguage())) return false;
    if (!getTargetLanguage().equals(other.getTargetLanguage())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed") and folds in only the fields
  // that participate in equals(), keyed by field number.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasVersion()) {
      hash = (37 * hash) + VERSION_FIELD_NUMBER;
      hash = (53 * hash) + version_;
    }
    hash = (37 * hash) + SOURCE_LANGUAGE_FIELD_NUMBER;
    hash = (53 * hash) + getSourceLanguage().hashCode();
    hash = (37 * hash) + TARGET_LANGUAGE_FIELD_NUMBER;
    hash = (53 * hash) + getTargetLanguage().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parsing entry points. Byte-oriented overloads delegate directly to
  // PARSER; stream overloads go through parseWithIOException, which rewraps
  // wire-format errors as InvalidProtocolBufferException.
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.aiplatform.v1.CometSpec parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CometSpec parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: fresh builders come from the default instance; toBuilder
  // on the default instance avoids a redundant mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.aiplatform.v1.CometSpec prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Spec for Comet metric.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.CometSpec}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.CometSpec)
      com.google.cloud.aiplatform.v1.CometSpecOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_CometSpec_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_CometSpec_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.CometSpec.class,
              com.google.cloud.aiplatform.v1.CometSpec.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1.CometSpec.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and fields to their proto defaults.
      bitField0_ = 0;
      version_ = 0;
      sourceLanguage_ = "";
      targetLanguage_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_CometSpec_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CometSpec getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.CometSpec.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CometSpec build() {
      com.google.cloud.aiplatform.v1.CometSpec result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CometSpec buildPartial() {
      com.google.cloud.aiplatform.v1.CometSpec result =
          new com.google.cloud.aiplatform.v1.CometSpec(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields into result. Builder bits: 0x1 = version,
    // 0x2 = sourceLanguage, 0x4 = targetLanguage. Only version (explicit
    // presence) propagates a bit onto the built message.
    private void buildPartial0(com.google.cloud.aiplatform.v1.CometSpec result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.version_ = version_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.sourceLanguage_ = sourceLanguage_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.targetLanguage_ = targetLanguage_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.CometSpec) {
        return mergeFrom((com.google.cloud.aiplatform.v1.CometSpec) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: set (version) / non-empty (strings) fields of `other`
    // overwrite this builder's values; unknown fields are concatenated.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.CometSpec other) {
      if (other == com.google.cloud.aiplatform.v1.CometSpec.getDefaultInstance()) return this;
      if (other.hasVersion()) {
        setVersion(other.getVersion());
      }
      if (!other.getSourceLanguage().isEmpty()) {
        sourceLanguage_ = other.sourceLanguage_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getTargetLanguage().isEmpty()) {
        targetLanguage_ = other.targetLanguage_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Tag = (field_number << 3) | wire_type: 8 = version (varint),
          // 18 = source_language, 26 = target_language (length-delimited).
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                version_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                sourceLanguage_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                targetLanguage_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // Stored as the raw wire number so unrecognized enum values round-trip.
    private int version_ = 0;
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the version field is set.
     */
    @java.lang.Override
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The enum numeric value on the wire for version.
     */
    @java.lang.Override
    public int getVersionValue() {
      return version_;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The enum numeric value on the wire for version to set.
     * @return This builder for chaining.
     */
    public Builder setVersionValue(int value) {
      version_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The version.
     */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CometSpec.CometVersion getVersion() {
      // Wire values with no matching enum constant surface as UNRECOGNIZED.
      com.google.cloud.aiplatform.v1.CometSpec.CometVersion result =
          com.google.cloud.aiplatform.v1.CometSpec.CometVersion.forNumber(version_);
      return result == null
          ? com.google.cloud.aiplatform.v1.CometSpec.CometVersion.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The version to set.
     * @return This builder for chaining.
     */
    public Builder setVersion(com.google.cloud.aiplatform.v1.CometSpec.CometVersion value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      version_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Which version to use for evaluation.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearVersion() {
      // Drop the presence bit and restore the default enum value.
      bitField0_ = (bitField0_ & ~0x00000001);
      version_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object sourceLanguage_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The sourceLanguage.
     */
    public java.lang.String getSourceLanguage() {
      java.lang.Object ref = sourceLanguage_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String form.
        sourceLanguage_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for sourceLanguage.
     */
    public com.google.protobuf.ByteString getSourceLanguageBytes() {
      java.lang.Object ref = sourceLanguage_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString form.
        sourceLanguage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The sourceLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setSourceLanguage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      sourceLanguage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSourceLanguage() {
      sourceLanguage_ = getDefaultInstance().getSourceLanguage();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Source language in BCP-47 format.
     * </pre>
     *
     * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for sourceLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setSourceLanguageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; rejects malformed bytes up front.
      checkByteStringIsUtf8(value);
      sourceLanguage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object targetLanguage_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The targetLanguage.
     */
    public java.lang.String getTargetLanguage() {
      java.lang.Object ref = targetLanguage_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String form.
        targetLanguage_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for targetLanguage.
     */
    public com.google.protobuf.ByteString getTargetLanguageBytes() {
      java.lang.Object ref = targetLanguage_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString form.
        targetLanguage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The targetLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setTargetLanguage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      targetLanguage_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTargetLanguage() {
      targetLanguage_ = getDefaultInstance().getTargetLanguage();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Target language in BCP-47 format. Covers both prediction and
     * reference.
     * </pre>
     *
     * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for targetLanguage to set.
     * @return This builder for chaining.
     */
    public Builder setTargetLanguageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; rejects malformed bytes up front.
      checkByteStringIsUtf8(value);
      targetLanguage_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.CometSpec)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.CometSpec)
  private static final com.google.cloud.aiplatform.v1.CometSpec DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.CometSpec();
  }
  public static com.google.cloud.aiplatform.v1.CometSpec getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; on failure it attaches the
  // partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CometSpec> PARSER =
      new com.google.protobuf.AbstractParser<CometSpec>() {
        @java.lang.Override
        public CometSpec parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CometSpec> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CometSpec> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.CometSpec getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/dataset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for
* [DatasetService.UpdateDataset][google.cloud.aiplatform.v1beta1.DatasetService.UpdateDataset].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateDatasetRequest}
*/
public final class UpdateDatasetRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.UpdateDatasetRequest)
UpdateDatasetRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateDatasetRequest.newBuilder() to construct.
  private UpdateDatasetRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UpdateDatasetRequest() {}
  // Called reflectively by the protobuf runtime to allocate new instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateDatasetRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_UpdateDatasetRequest_descriptor;
  }
  // Wires reflection-based field access to the tables generated in
  // DatasetServiceProto.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_UpdateDatasetRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest.class,
            com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest.Builder.class);
  }
  // Presence bits: 0x1 = dataset, 0x2 = updateMask.
  private int bitField0_;
  public static final int DATASET_FIELD_NUMBER = 1;
  private com.google.cloud.aiplatform.v1beta1.Dataset dataset_;
  /**
   *
   *
   * <pre>
   * Required. The Dataset which replaces the resource on the server.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the dataset field is set.
   */
  @java.lang.Override
  public boolean hasDataset() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The Dataset which replaces the resource on the server.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The dataset.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.Dataset getDataset() {
    // Never returns null: an unset field reads as the default instance.
    return dataset_ == null
        ? com.google.cloud.aiplatform.v1beta1.Dataset.getDefaultInstance()
        : dataset_;
  }
  /**
   *
   *
   * <pre>
   * Required. The Dataset which replaces the resource on the server.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder getDatasetOrBuilder() {
    return dataset_ == null
        ? com.google.cloud.aiplatform.v1beta1.Dataset.getDefaultInstance()
        : dataset_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource.
   * For the `FieldMask` definition, see
   * [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
   *
   * * `display_name`
   * * `description`
   * * `labels`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource.
   * For the `FieldMask` definition, see
   * [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
   *
   * * `display_name`
   * * `description`
   * * `labels`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    // Never returns null: an unset field reads as the default instance.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource.
   * For the `FieldMask` definition, see
   * [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
   *
   * * `display_name`
   * * `description`
   * * `labels`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required wire-level fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Message fields are written only when their presence bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getDataset());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means "not yet computed". Field checks mirror writeTo.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDataset());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: dataset and updateMask presence + contents, plus unknown
  // fields, must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest other =
        (com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest) obj;
    if (hasDataset() != other.hasDataset()) return false;
    if (hasDataset()) {
      if (!getDataset().equals(other.getDataset())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed") and folds in only the fields
  // that participate in equals(), keyed by field number.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasDataset()) {
      hash = (37 * hash) + DATASET_FIELD_NUMBER;
      hash = (53 * hash) + getDataset().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parsing entry points. Byte-oriented overloads delegate directly to
  // PARSER; stream overloads go through parseWithIOException, which rewraps
  // wire-format errors as InvalidProtocolBufferException.
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: fresh builders come from the default instance; toBuilder
  // on the default instance avoids a redundant mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [DatasetService.UpdateDataset][google.cloud.aiplatform.v1beta1.DatasetService.UpdateDataset].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateDatasetRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.UpdateDatasetRequest)
com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateDatasetRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateDatasetRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest.class,
com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getDatasetFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
    @java.lang.Override
    public Builder clear() {
      // Resets both singular message fields, disposing any lazily-created
      // field builders so future access re-creates them from scratch.
      super.clear();
      bitField0_ = 0;
      dataset_ = null;
      if (datasetBuilder_ != null) {
        datasetBuilder_.dispose();
        datasetBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateDatasetRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest.getDefaultInstance();
}
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest build() {
      // buildPartial never leaves required fields unset for proto3 messages,
      // but the generated contract still verifies isInitialized.
      com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest buildPartial() {
      // Copies set fields into a fresh message; skips the field-copy pass
      // entirely when no presence bits are set.
      com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest result =
          new com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers each field whose builder presence bit is set, preferring the
    // nested field builder's built message over the raw field when one exists,
    // and mirrors the presence bits onto the result message.
    private void buildPartial0(com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.dataset_ = datasetBuilder_ == null ? dataset_ : datasetBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to the reflective
      // field-by-field merge in the superclass.
      if (other instanceof com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges only the fields that are set on {@code other}; merging the default
    // instance is a no-op.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest other) {
      if (other == com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest.getDefaultInstance())
        return this;
      if (other.hasDataset()) {
        mergeDataset(other.getDataset());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 message with no required fields: always initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Wire-format tag dispatch loop: tag 10 = field 1 (dataset, message),
        // tag 18 = field 2 (update_mask, message); tag 0 means end of input.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getDatasetFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Unknown fields are preserved; a false return signals an
                // end-group tag, which also terminates the loop.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always notify the parent, even on failure, so partially-read state
        // is visible.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.cloud.aiplatform.v1beta1.Dataset dataset_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Dataset,
com.google.cloud.aiplatform.v1beta1.Dataset.Builder,
com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder>
datasetBuilder_;
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the dataset field is set.
*/
public boolean hasDataset() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The dataset.
*/
public com.google.cloud.aiplatform.v1beta1.Dataset getDataset() {
if (datasetBuilder_ == null) {
return dataset_ == null
? com.google.cloud.aiplatform.v1beta1.Dataset.getDefaultInstance()
: dataset_;
} else {
return datasetBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDataset(com.google.cloud.aiplatform.v1beta1.Dataset value) {
if (datasetBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
dataset_ = value;
} else {
datasetBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDataset(com.google.cloud.aiplatform.v1beta1.Dataset.Builder builderForValue) {
if (datasetBuilder_ == null) {
dataset_ = builderForValue.build();
} else {
datasetBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Proto-merges {@code value} into an existing non-default dataset;
    // otherwise replaces the field outright. Presence is only recorded when the
    // resulting field is non-null.
    public Builder mergeDataset(com.google.cloud.aiplatform.v1beta1.Dataset value) {
      if (datasetBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && dataset_ != null
            && dataset_ != com.google.cloud.aiplatform.v1beta1.Dataset.getDefaultInstance()) {
          getDatasetBuilder().mergeFrom(value);
        } else {
          dataset_ = value;
        }
      } else {
        datasetBuilder_.mergeFrom(value);
      }
      if (dataset_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDataset() {
bitField0_ = (bitField0_ & ~0x00000001);
dataset_ = null;
if (datasetBuilder_ != null) {
datasetBuilder_.dispose();
datasetBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.Dataset.Builder getDatasetBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getDatasetFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder getDatasetOrBuilder() {
if (datasetBuilder_ != null) {
return datasetBuilder_.getMessageOrBuilder();
} else {
return dataset_ == null
? com.google.cloud.aiplatform.v1beta1.Dataset.getDefaultInstance()
: dataset_;
}
}
/**
*
*
* <pre>
* Required. The Dataset which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Dataset dataset = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Dataset,
com.google.cloud.aiplatform.v1beta1.Dataset.Builder,
com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder>
getDatasetFieldBuilder() {
if (datasetBuilder_ == null) {
datasetBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Dataset,
com.google.cloud.aiplatform.v1beta1.Dataset.Builder,
com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder>(
getDataset(), getParentForChildren(), isClean());
dataset_ = null;
}
return datasetBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Proto-merges {@code value} into an existing non-default update_mask;
    // otherwise replaces the field outright. Mirrors mergeDataset for field 2.
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* [google.protobuf.FieldMask][google.protobuf.FieldMask]. Updatable fields:
*
* * `display_name`
* * `description`
* * `labels`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.UpdateDatasetRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.UpdateDatasetRequest)
  // Shared immutable default (all-fields-unset) instance, created once at
  // class-load time.
  private static final com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest();
  }
public static com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Message parser implemented by delegating to Builder.mergeFrom. On any
  // failure the partially-built message is attached to the thrown
  // InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<UpdateDatasetRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateDatasetRequest>() {
        @java.lang.Override
        public UpdateDatasetRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<UpdateDatasetRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateDatasetRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateDatasetRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,910 | java-gkehub/proto-google-cloud-gkehub-v1/src/main/java/com/google/cloud/gkehub/v1/MembershipFeatureState.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1/feature.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.v1;
/**
*
*
* <pre>
* MembershipFeatureState contains Feature status information for a single
* Membership.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1.MembershipFeatureState}
*/
public final class MembershipFeatureState extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1.MembershipFeatureState)
MembershipFeatureStateOrBuilder {
private static final long serialVersionUID = 0L;
// Use MembershipFeatureState.newBuilder() to construct.
private MembershipFeatureState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MembershipFeatureState() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MembershipFeatureState();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1.FeatureProto
.internal_static_google_cloud_gkehub_v1_MembershipFeatureState_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1.FeatureProto
.internal_static_google_cloud_gkehub_v1_MembershipFeatureState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1.MembershipFeatureState.class,
com.google.cloud.gkehub.v1.MembershipFeatureState.Builder.class);
}
private int bitField0_;
private int featureStateCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object featureState_;
  // Discriminator for the feature_state oneof: either field 106
  // (configmanagement) is set, or nothing is (FEATURESTATE_NOT_SET = 0).
  public enum FeatureStateCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    CONFIGMANAGEMENT(106),
    FEATURESTATE_NOT_SET(0);
    private final int value;

    private FeatureStateCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static FeatureStateCase valueOf(int value) {
      return forNumber(value);
    }

    // Returns null (not an exception) for unrecognized field numbers, per the
    // generated oneof-case contract.
    public static FeatureStateCase forNumber(int value) {
      switch (value) {
        case 106:
          return CONFIGMANAGEMENT;
        case 0:
          return FEATURESTATE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };
public FeatureStateCase getFeatureStateCase() {
return FeatureStateCase.forNumber(featureStateCase_);
}
public static final int CONFIGMANAGEMENT_FIELD_NUMBER = 106;
/**
*
*
* <pre>
* Config Management-specific state.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
*
* @return Whether the configmanagement field is set.
*/
@java.lang.Override
public boolean hasConfigmanagement() {
return featureStateCase_ == 106;
}
/**
*
*
* <pre>
* Config Management-specific state.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
*
* @return The configmanagement.
*/
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.MembershipState getConfigmanagement() {
if (featureStateCase_ == 106) {
return (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_;
}
return com.google.cloud.gkehub.configmanagement.v1.MembershipState.getDefaultInstance();
}
/**
*
*
* <pre>
* Config Management-specific state.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
*/
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.MembershipStateOrBuilder
getConfigmanagementOrBuilder() {
if (featureStateCase_ == 106) {
return (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_;
}
return com.google.cloud.gkehub.configmanagement.v1.MembershipState.getDefaultInstance();
}
public static final int STATE_FIELD_NUMBER = 1;
private com.google.cloud.gkehub.v1.FeatureState state_;
/**
*
*
* <pre>
* The high-level state of this Feature for a single membership.
* </pre>
*
* <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
*
* @return Whether the state field is set.
*/
@java.lang.Override
public boolean hasState() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The high-level state of this Feature for a single membership.
* </pre>
*
* <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
*
* @return The state.
*/
@java.lang.Override
public com.google.cloud.gkehub.v1.FeatureState getState() {
return state_ == null ? com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance() : state_;
}
/**
*
*
* <pre>
* The high-level state of this Feature for a single membership.
* </pre>
*
* <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
*/
@java.lang.Override
public com.google.cloud.gkehub.v1.FeatureStateOrBuilder getStateOrBuilder() {
return state_ == null ? com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance() : state_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Emit field 1 (state) only when present, then the oneof payload
    // (field 106) only when that case is active, then any unknown fields.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getState());
    }
    if (featureStateCase_ == 106) {
      output.writeMessage(
          106, (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 in memoizedSize means "not yet computed". The sum mirrors
    // exactly what writeTo emits.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getState());
    }
    if (featureStateCase_ == 106) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              106, (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gkehub.v1.MembershipFeatureState)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.v1.MembershipFeatureState other =
        (com.google.cloud.gkehub.v1.MembershipFeatureState) obj;

    // Field 1 (state): presence must match, then values.
    if (hasState() != other.hasState()) return false;
    if (hasState()) {
      if (!getState().equals(other.getState())) return false;
    }
    // Oneof: compare the active case first, then the payload for that case.
    if (!getFeatureStateCase().equals(other.getFeatureStateCase())) return false;
    switch (featureStateCase_) {
      case 106:
        if (!getConfigmanagement().equals(other.getConfigmanagement())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 in memoizedHashCode means "not yet computed". Mixes the
    // descriptor, each set field (tagged by field number), the active oneof
    // payload, and the unknown field set — consistent with equals().
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasState()) {
      hash = (37 * hash) + STATE_FIELD_NUMBER;
      hash = (53 * hash) + getState().hashCode();
    }
    switch (featureStateCase_) {
      case 106:
        hash = (37 * hash) + CONFIGMANAGEMENT_FIELD_NUMBER;
        hash = (53 * hash) + getConfigmanagement().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1.MembershipFeatureState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.gkehub.v1.MembershipFeatureState prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * MembershipFeatureState contains Feature status information for a single
   * Membership.
   * </pre>
   *
   * Protobuf type {@code google.cloud.gkehub.v1.MembershipFeatureState}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1.MembershipFeatureState)
      com.google.cloud.gkehub.v1.MembershipFeatureStateOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gkehub.v1.FeatureProto
          .internal_static_google_cloud_gkehub_v1_MembershipFeatureState_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gkehub.v1.FeatureProto
          .internal_static_google_cloud_gkehub_v1_MembershipFeatureState_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gkehub.v1.MembershipFeatureState.class,
              com.google.cloud.gkehub.v1.MembershipFeatureState.Builder.class);
    }

    // Construct using com.google.cloud.gkehub.v1.MembershipFeatureState.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested `state` field builder only when the protobuf
    // runtime requests it (alwaysUseFieldBuilders is a runtime debug flag).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getStateFieldBuilder();
      }
    }

    // Resets the singular `state` field and the `feature_state` oneof back to
    // their unset defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (configmanagementBuilder_ != null) {
        configmanagementBuilder_.clear();
      }
      state_ = null;
      if (stateBuilder_ != null) {
        stateBuilder_.dispose();
        stateBuilder_ = null;
      }
      featureStateCase_ = 0;
      featureState_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.gkehub.v1.FeatureProto
          .internal_static_google_cloud_gkehub_v1_MembershipFeatureState_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.v1.MembershipFeatureState getDefaultInstanceForType() {
      return com.google.cloud.gkehub.v1.MembershipFeatureState.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.gkehub.v1.MembershipFeatureState build() {
      com.google.cloud.gkehub.v1.MembershipFeatureState result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.v1.MembershipFeatureState buildPartial() {
      com.google.cloud.gkehub.v1.MembershipFeatureState result =
          new com.google.cloud.gkehub.v1.MembershipFeatureState(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Copies the singular `state` field into the result. Builder bit
    // 0x00000002 maps to message presence bit 0x00000001 (hasState).
    private void buildPartial0(com.google.cloud.gkehub.v1.MembershipFeatureState result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.state_ = stateBuilder_ == null ? state_ : stateBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    // Copies the `feature_state` oneof into the result; when the active case is
    // configmanagement (field number 106) and a nested builder exists, the
    // built message replaces the raw stored object.
    private void buildPartialOneofs(com.google.cloud.gkehub.v1.MembershipFeatureState result) {
      result.featureStateCase_ = featureStateCase_;
      result.featureState_ = this.featureState_;
      if (featureStateCase_ == 106 && configmanagementBuilder_ != null) {
        result.featureState_ = configmanagementBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Generic merge entry point: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.gkehub.v1.MembershipFeatureState) {
        return mergeFrom((com.google.cloud.gkehub.v1.MembershipFeatureState) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: set fields of `other` overwrite/merge into this builder;
    // the oneof adopts other's case only when other has one set.
    public Builder mergeFrom(com.google.cloud.gkehub.v1.MembershipFeatureState other) {
      if (other == com.google.cloud.gkehub.v1.MembershipFeatureState.getDefaultInstance())
        return this;
      if (other.hasState()) {
        mergeState(other.getState());
      }
      switch (other.getFeatureStateCase()) {
        case CONFIGMANAGEMENT:
          {
            mergeConfigmanagement(other.getConfigmanagement());
            break;
          }
        case FEATURESTATE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag; unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // tag 10 = field 1 (state), wire type 2 (length-delimited)
                input.readMessage(getStateFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 10
            case 850:
              {
                // tag 850 = field 106 (configmanagement), wire type 2
                input.readMessage(
                    getConfigmanagementFieldBuilder().getBuilder(), extensionRegistry);
                featureStateCase_ = 106;
                break;
              } // case 850
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // `feature_state` oneof bookkeeping: the active field number (0 = unset)
    // and the currently stored value object.
    private int featureStateCase_ = 0;
    private java.lang.Object featureState_;

    public FeatureStateCase getFeatureStateCase() {
      return FeatureStateCase.forNumber(featureStateCase_);
    }

    public Builder clearFeatureState() {
      featureStateCase_ = 0;
      featureState_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1.MembershipState,
            com.google.cloud.gkehub.configmanagement.v1.MembershipState.Builder,
            com.google.cloud.gkehub.configmanagement.v1.MembershipStateOrBuilder>
        configmanagementBuilder_;

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     *
     * @return Whether the configmanagement field is set.
     */
    @java.lang.Override
    public boolean hasConfigmanagement() {
      return featureStateCase_ == 106;
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     *
     * @return The configmanagement.
     */
    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1.MembershipState getConfigmanagement() {
      if (configmanagementBuilder_ == null) {
        if (featureStateCase_ == 106) {
          return (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_;
        }
        return com.google.cloud.gkehub.configmanagement.v1.MembershipState.getDefaultInstance();
      } else {
        if (featureStateCase_ == 106) {
          return configmanagementBuilder_.getMessage();
        }
        return com.google.cloud.gkehub.configmanagement.v1.MembershipState.getDefaultInstance();
      }
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     */
    public Builder setConfigmanagement(
        com.google.cloud.gkehub.configmanagement.v1.MembershipState value) {
      if (configmanagementBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        featureState_ = value;
        onChanged();
      } else {
        configmanagementBuilder_.setMessage(value);
      }
      featureStateCase_ = 106;
      return this;
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     */
    public Builder setConfigmanagement(
        com.google.cloud.gkehub.configmanagement.v1.MembershipState.Builder builderForValue) {
      if (configmanagementBuilder_ == null) {
        featureState_ = builderForValue.build();
        onChanged();
      } else {
        configmanagementBuilder_.setMessage(builderForValue.build());
      }
      featureStateCase_ = 106;
      return this;
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     */
    public Builder mergeConfigmanagement(
        com.google.cloud.gkehub.configmanagement.v1.MembershipState value) {
      if (configmanagementBuilder_ == null) {
        // Merge only when this oneof case is already active and non-default;
        // otherwise the incoming value replaces whatever case was set.
        if (featureStateCase_ == 106
            && featureState_
                != com.google.cloud.gkehub.configmanagement.v1.MembershipState
                    .getDefaultInstance()) {
          featureState_ =
              com.google.cloud.gkehub.configmanagement.v1.MembershipState.newBuilder(
                      (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          featureState_ = value;
        }
        onChanged();
      } else {
        if (featureStateCase_ == 106) {
          configmanagementBuilder_.mergeFrom(value);
        } else {
          configmanagementBuilder_.setMessage(value);
        }
      }
      featureStateCase_ = 106;
      return this;
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     */
    public Builder clearConfigmanagement() {
      if (configmanagementBuilder_ == null) {
        if (featureStateCase_ == 106) {
          featureStateCase_ = 0;
          featureState_ = null;
          onChanged();
        }
      } else {
        if (featureStateCase_ == 106) {
          featureStateCase_ = 0;
          featureState_ = null;
        }
        configmanagementBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     */
    public com.google.cloud.gkehub.configmanagement.v1.MembershipState.Builder
        getConfigmanagementBuilder() {
      return getConfigmanagementFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     */
    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1.MembershipStateOrBuilder
        getConfigmanagementOrBuilder() {
      if ((featureStateCase_ == 106) && (configmanagementBuilder_ != null)) {
        return configmanagementBuilder_.getMessageOrBuilder();
      } else {
        if (featureStateCase_ == 106) {
          return (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_;
        }
        return com.google.cloud.gkehub.configmanagement.v1.MembershipState.getDefaultInstance();
      }
    }

    /**
     *
     *
     * <pre>
     * Config Management-specific state.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1.MembershipState configmanagement = 106;</code>
     */
    // Lazily creates the nested builder for the configmanagement oneof case,
    // seeding it from the raw stored value and activating case 106.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1.MembershipState,
            com.google.cloud.gkehub.configmanagement.v1.MembershipState.Builder,
            com.google.cloud.gkehub.configmanagement.v1.MembershipStateOrBuilder>
        getConfigmanagementFieldBuilder() {
      if (configmanagementBuilder_ == null) {
        if (!(featureStateCase_ == 106)) {
          featureState_ =
              com.google.cloud.gkehub.configmanagement.v1.MembershipState.getDefaultInstance();
        }
        configmanagementBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.configmanagement.v1.MembershipState,
                com.google.cloud.gkehub.configmanagement.v1.MembershipState.Builder,
                com.google.cloud.gkehub.configmanagement.v1.MembershipStateOrBuilder>(
                (com.google.cloud.gkehub.configmanagement.v1.MembershipState) featureState_,
                getParentForChildren(),
                isClean());
        featureState_ = null;
      }
      featureStateCase_ = 106;
      onChanged();
      return configmanagementBuilder_;
    }

    private com.google.cloud.gkehub.v1.FeatureState state_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.v1.FeatureState,
            com.google.cloud.gkehub.v1.FeatureState.Builder,
            com.google.cloud.gkehub.v1.FeatureStateOrBuilder>
        stateBuilder_;

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     *
     * @return Whether the state field is set.
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     *
     * @return The state.
     */
    public com.google.cloud.gkehub.v1.FeatureState getState() {
      if (stateBuilder_ == null) {
        return state_ == null
            ? com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance()
            : state_;
      } else {
        return stateBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     */
    public Builder setState(com.google.cloud.gkehub.v1.FeatureState value) {
      if (stateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        state_ = value;
      } else {
        stateBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     */
    public Builder setState(com.google.cloud.gkehub.v1.FeatureState.Builder builderForValue) {
      if (stateBuilder_ == null) {
        state_ = builderForValue.build();
      } else {
        stateBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     */
    public Builder mergeState(com.google.cloud.gkehub.v1.FeatureState value) {
      if (stateBuilder_ == null) {
        // Merge into the existing value only when one is present and
        // non-default; otherwise adopt the incoming value directly.
        if (((bitField0_ & 0x00000002) != 0)
            && state_ != null
            && state_ != com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance()) {
          getStateBuilder().mergeFrom(value);
        } else {
          state_ = value;
        }
      } else {
        stateBuilder_.mergeFrom(value);
      }
      if (state_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     */
    public Builder clearState() {
      bitField0_ = (bitField0_ & ~0x00000002);
      state_ = null;
      if (stateBuilder_ != null) {
        stateBuilder_.dispose();
        stateBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     */
    public com.google.cloud.gkehub.v1.FeatureState.Builder getStateBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getStateFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     */
    public com.google.cloud.gkehub.v1.FeatureStateOrBuilder getStateOrBuilder() {
      if (stateBuilder_ != null) {
        return stateBuilder_.getMessageOrBuilder();
      } else {
        return state_ == null
            ? com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance()
            : state_;
      }
    }

    /**
     *
     *
     * <pre>
     * The high-level state of this Feature for a single membership.
     * </pre>
     *
     * <code>.google.cloud.gkehub.v1.FeatureState state = 1;</code>
     */
    // Lazily creates the nested builder for `state`, seeding it with the
    // current value (or default) and clearing the raw field afterwards.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.v1.FeatureState,
            com.google.cloud.gkehub.v1.FeatureState.Builder,
            com.google.cloud.gkehub.v1.FeatureStateOrBuilder>
        getStateFieldBuilder() {
      if (stateBuilder_ == null) {
        stateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.v1.FeatureState,
                com.google.cloud.gkehub.v1.FeatureState.Builder,
                com.google.cloud.gkehub.v1.FeatureStateOrBuilder>(
                getState(), getParentForChildren(), isClean());
        state_ = null;
      }
      return stateBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1.MembershipFeatureState)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1.MembershipFeatureState)
  // Shared immutable default instance; all unset message fields resolve to it.
  private static final com.google.cloud.gkehub.v1.MembershipFeatureState DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1.MembershipFeatureState();
  }

  public static com.google.cloud.gkehub.v1.MembershipFeatureState getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure the exception is rethrown with the
  // partially built message attached via setUnfinishedMessage, so callers can
  // inspect what was read before the error.
  private static final com.google.protobuf.Parser<MembershipFeatureState> PARSER =
      new com.google.protobuf.AbstractParser<MembershipFeatureState>() {
        @java.lang.Override
        public MembershipFeatureState parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the singleton parser.
  public static com.google.protobuf.Parser<MembershipFeatureState> parser() {
    return PARSER;
  }
  // Instance accessor for the singleton parser (required by MessageLite).
  @java.lang.Override
  public com.google.protobuf.Parser<MembershipFeatureState> getParserForType() {
    return PARSER;
  }
  // Instance accessor for the shared default instance.
  @java.lang.Override
  public com.google.cloud.gkehub.v1.MembershipFeatureState getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/commons-math | 36,286 | commons-math-legacy/src/test/java/org/apache/commons/math4/legacy/special/BesselJTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math4.legacy.special;
import org.apache.commons.math4.legacy.exception.MathIllegalArgumentException;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for {@link BesselJ} against externally generated reference values.
 */
public class BesselJTest {
/**
* Reference data for the {@link BesselJ#value(double, double)} function. This data
* was generated with the following <a href="http://www.r-project.org/">R</a> script.
*
* <pre>
* smallxs = 10 ** (-(seq(0,8,.5)))
* medxs = seq(1,20)
* near.eight = 8 + seq(-.5,.5,.1)
* largexs = c(10,30,100,300,1000)
* xs = unique(sort(c(smallxs, medxs, near.eight,largexs)))
*
* for (n in c(0:15, 30, 100)) {
* for (x in xs) {
* val = format( besselJ(x,n), digits=20 )
* s = paste("{", n, ",", x, ",", val, "},")
* print(s)
* }
* }
* </pre>
*/
private static final double[][] BESSEL_J_REF = {
{ 0 , 1e-08 , 1 },
{ 0 , 3.16227766016838e-08 , 0.99999999999999977796 },
{ 0 , 1e-07 , 0.99999999999999744649 },
{ 0 , 3.16227766016838e-07 , 0.99999999999997501998 },
{ 0 , 1e-06 , 0.99999999999974997777 },
{ 0 , 3.16227766016838e-06 , 0.99999999999749999979 },
{ 0 , 1e-05 , 0.99999999997499999793 },
{ 0 , 3.16227766016838e-05 , 0.99999999974999997931 },
{ 0 , 1e-04 , 0.99999999750000001519 },
{ 0 , 0.000316227766016838 , 0.99999997500000015194 },
{ 0 , 0.001 , 0.9999997500000156192 },
{ 0 , 0.00316227766016838 , 0.99999750000156251151 },
{ 0 , 0.01 , 0.99997500015624951608 },
{ 0 , 0.0316227766016838 , 0.9997500156245660019 },
{ 0 , 0.1 , 0.99750156206604001508 },
{ 0 , 0.316227766016838 , 0.97515581664971295872 },
{ 0 , 1 , 0.76519768655796649437 },
{ 0 , 2 , 0.22389077914123567403 },
{ 0 , 3 , -0.26005195490193344643 },
{ 0 , 4 , -0.39714980986384734729 },
{ 0 , 5 , -0.177596771314338292 },
{ 0 , 6 , 0.15064525725099692233 },
{ 0 , 7 , 0.3000792705195556298 },
{ 0 , 7.5 , 0.26633965788037838873 },
{ 0 , 7.6 , 0.25160183384997636402 },
{ 0 , 7.7 , 0.23455913958646437689 },
{ 0 , 7.8 , 0.21540780774626291927 },
{ 0 , 7.9 , 0.19436184484127824734 },
{ 0 , 8 , 0.17165080713755390129 },
{ 0 , 8.1 , 0.14751745404437766052 },
{ 0 , 8.2 , 0.12221530178413773926 },
{ 0 , 8.3 , 0.09600610089501022959 },
{ 0 , 8.4 , 0.069157261656985186127 },
{ 0 , 8.5 , 0.041939251842934503756 },
{ 0 , 9 , -0.090333611182876139001 },
{ 0 , 10 , -0.24593576445134832098 },
{ 0 , 11 , -0.17119030040719609986 },
{ 0 , 12 , 0.0476893107968335353 },
{ 0 , 13 , 0.20692610237706782206 },
{ 0 , 14 , 0.17107347611045864433 },
{ 0 , 15 , -0.014224472826780772475 },
{ 0 , 16 , -0.17489907398362919411 },
{ 0 , 17 , -0.16985425215118354902 },
{ 0 , 18 , -0.013355805721984111492 },
{ 0 , 19 , 0.14662943965965119508 },
{ 0 , 20 , 0.16702466434058313438 },
{ 0 , 30 , -0.086367983581040239094 },
{ 0 , 100 , 0.019985850304223118368 },
{ 0 , 300 , -0.033298554876305661021 },
{ 0 , 1000 , 0.024786686152420175921 },
{ 1 , 1e-08 , 5.0000000000000001046e-09 },
{ 1 , 3.16227766016838e-08 , 1.5811388300841892647e-08 },
{ 1 , 1e-07 , 4.999999999999993818e-08 },
{ 1 , 3.16227766016838e-07 , 1.5811388300841697432e-07 },
{ 1 , 1e-06 , 4.9999999999993750869e-07 },
{ 1 , 3.16227766016838e-06 , 1.5811388300822132559e-06 },
{ 1 , 1e-05 , 4.9999999999375003889e-06 },
{ 1 , 3.16227766016838e-05 , 1.5811388298865475016e-05 },
{ 1 , 1e-04 , 4.9999999937500002646e-05 },
{ 1 , 0.000316227766016838 , 0.00015811388103199544184 },
{ 1 , 0.001 , 0.00049999993750000253697 },
{ 1 , 0.00316227766016838 , 0.0015811368536614756226 },
{ 1 , 0.01 , 0.0049999375002604158624 },
{ 1 , 0.0316227766016838 , 0.015809411959653556917 },
{ 1 , 0.1 , 0.049937526036241998428 },
{ 1 , 0.316227766016838 , 0.15614567743386048582 },
{ 1 , 1 , 0.44005058574493355339 },
{ 1 , 2 , 0.57672480775687340326 },
{ 1 , 3 , 0.33905895852593642692 },
{ 1 , 4 , -0.066043328023549133232 },
{ 1 , 5 , -0.32757913759146523036 },
{ 1 , 6 , -0.27668385812756562947 },
{ 1 , 7 , -0.0046828234823458325664 },
{ 1 , 7.5 , 0.13524842757970551022 },
{ 1 , 7.6 , 0.15921376839635667522 },
{ 1 , 7.7 , 0.18131271532458800855 },
{ 1 , 7.8 , 0.20135687275589611578 },
{ 1 , 7.9 , 0.21917939992175122788 },
{ 1 , 8 , 0.23463634685391462908 },
{ 1 , 8.1 , 0.2476077669815928417 },
{ 1 , 8.2 , 0.25799859764868082745 },
{ 1 , 8.3 , 0.26573930204186430037 },
{ 1 , 8.4 , 0.27078626827683538458 },
{ 1 , 8.5 , 0.27312196367405372488 },
{ 1 , 9 , 0.24531178657332528004 },
{ 1 , 10 , 0.043472746168861438332 },
{ 1 , 11 , -0.17678529895672151495 },
{ 1 , 12 , -0.2234471044906276016 },
{ 1 , 13 , -0.070318052121778371055 },
{ 1 , 14 , 0.13337515469879324126 },
{ 1 , 15 , 0.20510403861352277666 },
{ 1 , 16 , 0.090397175661304188243 },
{ 1 , 17 , -0.097668492757780639435 },
{ 1 , 18 , -0.18799488548806958521 },
{ 1 , 19 , -0.10570143114240927729 },
{ 1 , 20 , 0.066833124175850036619 },
{ 1 , 30 , -0.1187510626166229516 },
{ 1 , 100 , -0.077145352014112156258 },
{ 1 , 300 , -0.031887431377499955709 },
{ 1 , 1000 , 0.0047283119070895248195 },
{ 2 , 1e-08 , 1.2499999999999999739e-17 },
{ 2 , 3.16227766016838e-08 , 1.2499999999999998506e-16 },
{ 2 , 1e-07 , 1.2499999999999988152e-15 },
{ 2 , 3.16227766016838e-07 , 1.2499999999999894672e-14 },
{ 2 , 1e-06 , 1.249999999999895719e-13 },
{ 2 , 3.16227766016838e-06 , 1.2499999999989582746e-12 },
{ 2 , 1e-05 , 1.2499999999895835475e-11 },
{ 2 , 3.16227766016838e-05 , 1.2499999998958334818e-10 },
{ 2 , 1e-04 , 1.2499999989583335487e-09 },
{ 2 , 0.000316227766016838 , 1.2499999895833333493e-08 },
{ 2 , 0.001 , 1.2499998958333367811e-07 },
{ 2 , 0.00316227766016838 , 1.2499989583336589057e-06 },
{ 2 , 0.01 , 1.2499895833658854395e-05 },
{ 2 , 0.0316227766016838 , 0.00012498958365884872863 },
{ 2 , 0.1 , 0.0012489586587999190141 },
{ 2 , 0.316227766016838 , 0.012396158312196680837 },
{ 2 , 1 , 0.11490348493190047363 },
{ 2 , 2 , 0.35283402861563772923 },
{ 2 , 3 , 0.48609126058589108288 },
{ 2 , 4 , 0.36412814585207281537 },
{ 2 , 5 , 0.046565116277752213736 },
{ 2 , 6 , -0.24287320996018546548 },
{ 2 , 7 , -0.3014172200859401296 },
{ 2 , 7.5 , -0.23027341052579025638 },
{ 2 , 7.6 , -0.20970347374567196996 },
{ 2 , 7.7 , -0.18746492781384410664 },
{ 2 , 7.8 , -0.16377784037295622932 },
{ 2 , 7.9 , -0.13887338916488553564 },
{ 2 , 8 , -0.11299172042407523708 },
{ 2 , 8.1 , -0.086379733802009056598 },
{ 2 , 8.2 , -0.059288814552752158726 },
{ 2 , 8.3 , -0.031972534137934507936 },
{ 2 , 8.4 , -0.0046843406386910518141 },
{ 2 , 8.5 , 0.022324739609784025052 },
{ 2 , 9 , 0.14484734153250397592 },
{ 2 , 10 , 0.25463031368512062391 },
{ 2 , 11 , 0.13904751877870125121 },
{ 2 , 12 , -0.084930494878604809172 },
{ 2 , 13 , -0.21774426424195678087 },
{ 2 , 14 , -0.15201988258205964555 },
{ 2 , 15 , 0.041571677975250471981 },
{ 2 , 16 , 0.18619872094129222284 },
{ 2 , 17 , 0.15836384123850347216 },
{ 2 , 18 , -0.0075325148878013998069 },
{ 2 , 19 , -0.15775590609569428713 },
{ 2 , 20 , -0.16034135192299814321 },
{ 2 , 30 , 0.07845124607326538213 },
{ 2 , 100 , -0.021528757344505360799 },
{ 2 , 300 , 0.033085972000455661501 },
{ 2 , 1000 , -0.024777229528605997089 },
{ 3 , 1e-08 , 2.0833333333333334614e-26 },
{ 3 , 3.16227766016838e-08 , 6.5880784586841223417e-25 },
{ 3 , 1e-07 , 2.0833333333333317693e-23 },
{ 3 , 3.16227766016838e-07 , 6.5880784586840819929e-22 },
{ 3 , 1e-06 , 2.0833333333332027799e-20 },
{ 3 , 3.16227766016838e-06 , 6.5880784586800051603e-19 },
{ 3 , 1e-05 , 2.0833333333203129762e-17 },
{ 3 , 3.16227766016838e-05 , 6.5880784582723696076e-16 },
{ 3 , 1e-04 , 2.083333332031250315e-14 },
{ 3 , 0.000316227766016838 , 6.5880784175086335665e-13 },
{ 3 , 0.001 , 2.0833332031250032117e-11 },
{ 3 , 0.00316227766016838 , 6.5880743411361163135e-10 },
{ 3 , 0.01 , 2.083320312532551971e-08 },
{ 3 , 0.0316227766016838 , 6.5876667140741846331e-07 },
{ 3 , 0.1 , 2.0820315754756265453e-05 },
{ 3 , 0.316227766016838 , 0.00065470057642003857534 },
{ 3 , 1 , 0.019563353982668403586 },
{ 3 , 2 , 0.1289432494744020552 },
{ 3 , 3 , 0.30906272225525166508 },
{ 3 , 4 , 0.43017147387562193472 },
{ 3 , 5 , 0.36483123061366695694 },
{ 3 , 6 , 0.11476838482077529602 },
{ 3 , 7 , -0.16755558799533423753 },
{ 3 , 7.5 , -0.25806091319346030621 },
{ 3 , 7.6 , -0.26958401773618401176 },
{ 3 , 7.7 , -0.27869709340970183487 },
{ 3 , 7.8 , -0.28534550884459158882 },
{ 3 , 7.9 , -0.2894950400052375139 },
{ 3 , 8 , -0.29113220706595221987 },
{ 3 , 8.1 , -0.29026442564925164502 },
{ 3 , 8.2 , -0.28691997060124291297 },
{ 3 , 8.3 , -0.28114775222882071315 },
{ 3 , 8.4 , -0.27301690667621203445 },
{ 3 , 8.5 , -0.26261620385768480457 },
{ 3 , 9 , -0.1809351903366568648 },
{ 3 , 10 , 0.058379379305186815396 },
{ 3 , 11 , 0.22734803305806741691 },
{ 3 , 12 , 0.19513693953109267909 },
{ 3 , 13 , 0.0033198169704070513292 },
{ 3 , 14 , -0.17680940686509599713 },
{ 3 , 15 , -0.19401825782012263599 },
{ 3 , 16 , -0.043847495425981139472 },
{ 3 , 17 , 0.13493057304919323092 },
{ 3 , 18 , 0.18632099329078039007 },
{ 3 , 19 , 0.072489661438052577225 },
{ 3 , 20 , -0.098901394560449676363 },
{ 3 , 30 , 0.12921122875972501642 },
{ 3 , 100 , 0.076284201720331942798 },
{ 3 , 300 , 0.032328577670839367397 },
{ 3 , 1000 , -0.0048274208252039483777 },
{ 4 , 1e-08 , 2.6041666666666666342e-35 },
{ 4 , 3.16227766016838e-08 , 2.6041666666666659714e-33 },
{ 4 , 1e-07 , 2.6041666666666649861e-31 },
{ 4 , 3.16227766016838e-07 , 2.6041666666666531276e-29 },
{ 4 , 1e-06 , 2.6041666666665358894e-27 },
{ 4 , 3.16227766016838e-06 , 2.6041666666653639536e-25 },
{ 4 , 1e-05 , 2.6041666666536465525e-23 },
{ 4 , 3.16227766016838e-05 , 2.604166666536458817e-21 },
{ 4 , 1e-04 , 2.6041666653645840559e-19 },
{ 4 , 0.000316227766016838 , 2.6041666536458338462e-17 },
{ 4 , 0.001 , 2.6041665364583368871e-15 },
{ 4 , 0.00316227766016838 , 2.604165364583604802e-13 },
{ 4 , 0.01 , 2.6041536458604605458e-11 },
{ 4 , 0.0316227766016838 , 2.6040364610459735901e-09 },
{ 4 , 0.1 , 2.6028648545684040871e-07 },
{ 4 , 0.316227766016838 , 2.5911729278009268374e-05 },
{ 4 , 1 , 0.002476638964109955255 },
{ 4 , 2 , 0.033995719807568429427 },
{ 4 , 3 , 0.13203418392461221953 },
{ 4 , 4 , 0.28112906496136008672 },
{ 4 , 5 , 0.39123236045864817623 },
{ 4 , 6 , 0.35764159478096080313 },
{ 4 , 7 , 0.15779814466136793394 },
{ 4 , 7.5 , 0.02382467997102201071 },
{ 4 , 7.6 , -0.0031260139407891210546 },
{ 4 , 7.7 , -0.029701638479430046702 },
{ 4 , 7.8 , -0.055718704892114230554 },
{ 4 , 7.9 , -0.080996261472003727722 },
{ 4 , 8 , -0.10535743487538892782 },
{ 4 , 8.1 , -0.12863095186410331006 },
{ 4 , 8.2 , -0.15065262735059631316 },
{ 4 , 8.3 , -0.17126680482265874139 },
{ 4 , 8.4 , -0.19032773555860327264 },
{ 4 , 8.5 , -0.2077008835093262229 },
{ 4 , 9 , -0.26547080175694187654 },
{ 4 , 10 , -0.21960268610200855965 },
{ 4 , 11 , -0.015039500747028132846 },
{ 4 , 12 , 0.18249896464415113484 },
{ 4 , 13 , 0.21927648745906774819 },
{ 4 , 14 , 0.076244422497018474183 },
{ 4 , 15 , -0.11917898110329952499 },
{ 4 , 16 , -0.20264153172603513453 },
{ 4 , 17 , -0.11074128604467056713 },
{ 4 , 18 , 0.069639512651394869236 },
{ 4 , 19 , 0.18064737812876355272 },
{ 4 , 20 , 0.13067093355486322781 },
{ 4 , 30 , -0.05260900032132037607 },
{ 4 , 100 , 0.026105809447725277644 },
{ 4 , 300 , -0.032439400447038871378 },
{ 4 , 1000 , 0.024748265003654772859 },
{ 5 , 1e-08 , 2.6041666666666666817e-44 },
{ 5 , 3.16227766016838e-08 , 8.2350980733551520153e-42 },
{ 5 , 1e-07 , 2.6041666666666648818e-39 },
{ 5 , 3.16227766016838e-07 , 8.2350980733551185479e-37 },
{ 5 , 1e-06 , 2.6041666666665576923e-34 },
{ 5 , 3.16227766016838e-06 , 8.2350980733517218878e-32 },
{ 5 , 1e-05 , 2.6041666666558171668e-29 },
{ 5 , 3.16227766016838e-05 , 8.2350980730120282499e-27 },
{ 5 , 1e-04 , 2.6041666655815978351e-24 },
{ 5 , 0.000316227766016838 , 8.2350980390422474771e-22 },
{ 5 , 0.001 , 2.6041665581597245947e-19 },
{ 5 , 0.00316227766016838 , 8.2350946420649040367e-17 },
{ 5 , 0.01 , 2.6041558159915982186e-14 },
{ 5 , 0.0316227766016838 , 8.2347549503960048977e-12 },
{ 5 , 0.1 , 2.6030817909644421178e-09 },
{ 5 , 0.316227766016838 , 8.2008463739855235578e-07 },
{ 5 , 1 , 0.00024975773021123438876 },
{ 5 , 2 , 0.0070396297558716850601 },
{ 5 , 3 , 0.043028434877047584683 },
{ 5 , 4 , 0.13208665604709826646 },
{ 5 , 5 , 0.26114054612017006951 },
{ 5 , 6 , 0.36208707488717239986 },
{ 5 , 7 , 0.34789632475118331678 },
{ 5 , 7.5 , 0.28347390516255044357 },
{ 5 , 7.6 , 0.26629347674587972028 },
{ 5 , 7.7 , 0.2478382482362680439 },
{ 5 , 7.8 , 0.22819811921165392143 },
{ 5 , 7.9 , 0.20747350940067682545 },
{ 5 , 8 , 0.18577477219056331981 },
{ 5 , 8.1 , 0.16322151022791506203 },
{ 5 , 8.2 , 0.13994179757627084326 },
{ 5 , 8.3 , 0.11607131384553516507 },
{ 5 , 8.4 , 0.091752396620399440108 },
{ 5 , 8.5 , 0.067133019378318919967 },
{ 5 , 9 , -0.055038855669513706004 },
{ 5 , 10 , -0.23406152818679362704 },
{ 5 , 11 , -0.23828585178317879256 },
{ 5 , 12 , -0.073470963101658584571 },
{ 5 , 13 , 0.13161955992748081146 },
{ 5 , 14 , 0.22037764829196368477 },
{ 5 , 15 , 0.1304561345650295523 },
{ 5 , 16 , -0.057473270437036434732 },
{ 5 , 17 , -0.18704411942315585238 },
{ 5 , 18 , -0.15537009877904933708 },
{ 5 , 19 , 0.0035723925109004857348 },
{ 5 , 20 , 0.15116976798239498136 },
{ 5 , 30 , -0.14324029551207712041 },
{ 5 , 100 , -0.074195736964513925304 },
{ 5 , 300 , -0.03319362834942707341 },
{ 5 , 1000 , 0.0050254069452331864842 },
{ 6 , 1e-08 , 2.170138888888889163e-53 },
{ 6 , 3.16227766016838e-08 , 2.1701388888888880947e-50 },
{ 6 , 1e-07 , 2.1701388888888875174e-47 },
{ 6 , 3.16227766016838e-07 , 2.170138888888880604e-44 },
{ 6 , 1e-06 , 2.1701388888888106952e-41 },
{ 6 , 3.16227766016838e-06 , 2.1701388888881133808e-38 },
{ 6 , 1e-05 , 2.1701388888811393588e-35 },
{ 6 , 3.16227766016838e-05 , 2.1701388888113848269e-32 },
{ 6 , 1e-04 , 2.1701388881138396044e-29 },
{ 6 , 0.000316227766016838 , 2.1701388811383932341e-26 },
{ 6 , 0.001 , 2.1701388113839301844e-23 },
{ 6 , 0.00316227766016838 , 2.1701381138394068717e-20 },
{ 6 , 0.01 , 2.1701311384049674283e-17 },
{ 6 , 0.0316227766016838 , 2.1700613851395740978e-14 },
{ 6 , 0.1 , 2.1693639603760032489e-11 },
{ 6 , 0.316227766016838 , 2.1624004918010960028e-08 },
{ 6 , 1 , 2.0938338002389272967e-05 },
{ 6 , 2 , 0.0012024289717899932714 },
{ 6 , 3 , 0.011393932332213070266 },
{ 6 , 4 , 0.049087575156385579445 },
{ 6 , 5 , 0.13104873178169201831 },
{ 6 , 6 , 0.24583686336432652997 },
{ 6 , 7 , 0.33919660498317966146 },
{ 6 , 7.5 , 0.35414052691237862813 },
{ 6 , 7.6 , 0.35351216755378872536 },
{ 6 , 7.7 , 0.35156949333172621275 },
{ 6 , 7.8 , 0.3482803961891063893 },
{ 6 , 7.9 , 0.34362095691589844559 },
{ 6 , 8 , 0.33757590011359311921 },
{ 6 , 8.1 , 0.33013898918251699532 },
{ 6 , 8.2 , 0.32131335610214611931 },
{ 6 , 8.3 , 0.3111117612630625584 },
{ 6 , 8.4 , 0.29955677915431688785 },
{ 6 , 8.5 , 0.28668090630734854862 },
{ 6 , 9 , 0.20431651767970440692 },
{ 6 , 10 , -0.014458842084785107282 },
{ 6 , 11 , -0.20158400087404348966 },
{ 6 , 12 , -0.24372476722886662892 },
{ 6 , 13 , -0.1180306721302363665 },
{ 6 , 14 , 0.081168183425812737153 },
{ 6 , 15 , 0.20614973747998591169 },
{ 6 , 16 , 0.16672073770288736716 },
{ 6 , 17 , 0.00071533344281418307069 },
{ 6 , 18 , -0.15595623419531115528 },
{ 6 , 19 , -0.17876717154407903432 },
{ 6 , 20 , -0.055086049563665764883 },
{ 6 , 30 , 0.0048622351506280026001 },
{ 6 , 100 , -0.033525383144176669481 },
{ 6 , 300 , 0.031332946168724638836 },
{ 6 , 1000 , -0.024698010934202440508 },
{ 7 , 1e-08 , 1.5500992063492066701e-62 },
{ 7 , 3.16227766016838e-08 , 4.9018440912828279875e-59 },
{ 7 , 1e-07 , 1.5500992063492053031e-55 },
{ 7 , 3.16227766016838e-07 , 4.9018440912828133382e-52 },
{ 7 , 1e-06 , 1.55009920634915736e-48 },
{ 7 , 3.16227766016838e-06 , 4.9018440912812964979e-45 },
{ 7 , 1e-05 , 1.550099206344363137e-41 },
{ 7 , 3.16227766016838e-05 , 4.9018440911296494971e-38 },
{ 7 , 1e-04 , 1.5500992058648010339e-34 },
{ 7 , 0.000316227766016838 , 4.9018440759645687969e-31 },
{ 7 , 0.001 , 1.5500991579086071003e-27 },
{ 7 , 0.00316227766016838 , 4.9018425594567649302e-24 },
{ 7 , 0.01 , 1.550094362295914728e-20 },
{ 7 , 0.0316227766016838 , 4.9016909107824929132e-17 },
{ 7 , 0.1 , 1.5496148676202282287e-13 },
{ 7 , 0.316227766016838 , 4.8865470861431505644e-10 },
{ 7 , 1 , 1.5023258174368078499e-06 },
{ 7 , 2 , 0.00017494407486827416175 },
{ 7 , 3 , 0.0025472944518046929108 },
{ 7 , 4 , 0.015176069422058449318 },
{ 7 , 5 , 0.053376410155890716136 },
{ 7 , 6 , 0.12958665184148068783 },
{ 7 , 7 , 0.23358356950569605925 },
{ 7 , 7.5 , 0.28315093789725531703 },
{ 7 , 7.6 , 0.29188362991799726709 },
{ 7 , 7.7 , 0.30006226085213638655 },
{ 7 , 7.8 , 0.30761787492543296585 },
{ 7 , 7.9 , 0.31448237452220684229 },
{ 7 , 8 , 0.32058907797982633125 },
{ 7 , 8.1 , 0.3258732885609990082 },
{ 7 , 8.2 , 0.33027286989028453723 },
{ 7 , 8.3 , 0.33372882292033839713 },
{ 7 , 8.4 , 0.33618585931433897507 },
{ 7 , 8.5 , 0.33759296599676130723 },
{ 7 , 9 , 0.32746087924245292911 },
{ 7 , 10 , 0.21671091768505151842 },
{ 7 , 11 , 0.018376032647858614455 },
{ 7 , 12 , -0.17025380412720803047 },
{ 7 , 13 , -0.24057094958616048741 },
{ 7 , 14 , -0.15080491964126707671 },
{ 7 , 15 , 0.034463655418959161791 },
{ 7 , 16 , 0.18251382371420196704 },
{ 7 , 17 , 0.1875490606769070201 },
{ 7 , 18 , 0.051399275982175231248 },
{ 7 , 19 , -0.11647797453873988405 },
{ 7 , 20 , -0.18422139772059445417 },
{ 7 , 30 , 0.1451851895723283159 },
{ 7 , 100 , 0.070172690987212724134 },
{ 7 , 300 , 0.034446946196176060628 },
{ 7 , 1000 , -0.0053217830764436153956 },
{ 8 , 1e-08 , 9.6881200396825412359e-72 },
{ 8 , 3.16227766016838e-08 , 9.6881200396825359082e-68 },
{ 8 , 1e-07 , 9.6881200396825335915e-64 },
{ 8 , 3.16227766016838e-07 , 9.6881200396825091166e-60 },
{ 8 , 1e-06 , 9.6881200396822669073e-56 },
{ 8 , 3.16227766016838e-06 , 9.6881200396798449492e-52 },
{ 8 , 1e-05 , 9.6881200396556345156e-48 },
{ 8 , 3.16227766016838e-05 , 9.6881200394134308774e-44 },
{ 8 , 1e-04 , 9.6881200369913995322e-40 },
{ 8 , 0.000316227766016838 , 9.688120012771098157e-36 },
{ 8 , 0.001 , 9.6881197705681010442e-32 },
{ 8 , 0.00316227766016838 , 9.688117348538421731e-28 },
{ 8 , 0.01 , 9.6880931282716245736e-24 },
{ 8 , 0.0316227766016838 , 9.6878509286008909493e-20 },
{ 8 , 0.1 , 9.6854292315946525669e-16 },
{ 8 , 0.316227766016838 , 9.6612422089625085973e-12 },
{ 8 , 1 , 9.4223441726045005392e-08 },
{ 8 , 2 , 2.2179552287925904881e-05 },
{ 8 , 3 , 0.00049344177620883479096 },
{ 8 , 4 , 0.0040286678208190035769 },
{ 8 , 5 , 0.018405216654802002835 },
{ 8 , 6 , 0.056531990932461785582 },
{ 8 , 7 , 0.12797053402821254031 },
{ 8 , 7.5 , 0.17440789049583127479 },
{ 8 , 7.6 , 0.18416820334778524759 },
{ 8 , 7.7 , 0.19399825367215817185 },
{ 8 , 7.8 , 0.20385425111295268907 },
{ 8 , 7.9 , 0.21368958021206302389 },
{ 8 , 8 , 0.22345498635110294661 },
{ 8 , 8.1 , 0.23309879351550599758 },
{ 8 , 8.2 , 0.24256715346663235144 },
{ 8 , 8.3 , 0.25180432559052018382 },
{ 8 , 8.4 , 0.26075298636958132992 },
{ 8 , 8.5 , 0.26935456709908189854 },
{ 8 , 9 , 0.30506707225300011554 },
{ 8 , 10 , 0.31785412684385727644 },
{ 8 , 11 , 0.22497167878949989039 },
{ 8 , 12 , 0.045095329080457241533 },
{ 8 , 13 , -0.14104573511639803551 },
{ 8 , 14 , -0.23197310306707982774 },
{ 8 , 15 , -0.17398365908895732646 },
{ 8 , 16 , -0.0070211419529606520704 },
{ 8 , 17 , 0.1537368341734622057 },
{ 8 , 18 , 0.19593344884811411677 },
{ 8 , 19 , 0.092941295568165452345 },
{ 8 , 20 , -0.073868928840750344711 },
{ 8 , 30 , 0.062890853316458550371 },
{ 8 , 100 , 0.043349559882386451415 },
{ 8 , 300 , -0.029725422012903089664 },
{ 8 , 1000 , 0.02462350597113223058 } ,
{ 9 , 1e-08 , 5.382288910934745386e-81 },
{ 9 , 3.16227766016838e-08 , 1.702029198362092975e-76 },
{ 9 , 1e-07 , 5.3822889109347404393e-72 },
{ 9 , 3.16227766016838e-07 , 1.7020291983620889989e-67 },
{ 9 , 1e-06 , 5.3822889109346077433e-63 },
{ 9 , 3.16227766016838e-06 , 1.7020291983616675345e-58 },
{ 9 , 1e-05 , 5.3822889109212923968e-54 },
{ 9 , 3.16227766016838e-05 , 1.7020291983195439884e-49 },
{ 9 , 1e-04 , 5.3822889095891748719e-45 },
{ 9 , 0.000316227766016838 , 1.7020291941070210695e-40 },
{ 9 , 0.001 , 5.382288776377523226e-36 },
{ 9 , 0.00316227766016838 , 1.7020287728548427703e-31 },
{ 9 , 0.01 , 5.3822754552277587118e-27 },
{ 9 , 0.0316227766016838 , 1.7019866481156611902e-22 },
{ 9 , 0.1 , 5.3809434916023306372e-18 },
{ 9 , 0.316227766016838 , 1.6977789573201714453e-13 },
{ 9 , 1 , 5.2492501799118757129e-09 },
{ 9 , 2 , 2.492343435133064173e-06 },
{ 9 , 3 , 8.4395021309091773631e-05 },
{ 9 , 4 , 0.00093860186121756401367 },
{ 9 , 5 , 0.005520283139475687037 },
{ 9 , 6 , 0.021165323978417364265 },
{ 9 , 7 , 0.058920508273075426764 },
{ 9 , 7.5 , 0.088919228493851462658 },
{ 9 , 7.6 , 0.095838903445761125521 },
{ 9 , 7.7 , 0.10305099353156887965 },
{ 9 , 7.8 , 0.11054469146011103309 },
{ 9 , 7.9 , 0.11830664869209804591 },
{ 9 , 8 , 0.12632089472237958971 },
{ 9 , 8.1 , 0.13456877270419806414 },
{ 9 , 8.2 , 0.14302889297143717151 },
{ 9 , 8.3 , 0.15167710592885710885 },
{ 9 , 8.4 , 0.16048649567533976312 },
{ 9 , 8.5 , 0.16942739560151048872 },
{ 9 , 9 , 0.2148805825406584491 },
{ 9 , 10 , 0.29185568526512006837 },
{ 9 , 11 , 0.30885550013686852155 },
{ 9 , 12 , 0.23038090956781773211 },
{ 9 , 13 , 0.066976198673670619965 },
{ 9 , 14 , -0.11430719814968128001 },
{ 9 , 15 , -0.22004622511384699934 },
{ 9 , 16 , -0.18953496566716260263 },
{ 9 , 17 , -0.042855569690119083015 },
{ 9 , 18 , 0.12276378966059287023 },
{ 9 , 19 , 0.19474432870140553908 },
{ 9 , 20 , 0.12512625464799415065 },
{ 9 , 30 , -0.11164340113688375755 },
{ 9 , 100 , -0.063236761406030891908 },
{ 9 , 300 , -0.036032302036864222172 },
{ 9 , 1000 , 0.0057157591719817308837 },
{ 10 , 1e-08 , 2.6911444554673727331e-90 },
{ 10 , 3.16227766016838e-08 , 2.6911444554673710334e-85 },
{ 10 , 1e-07 , 2.6911444554673703522e-80 },
{ 10 , 3.16227766016838e-07 , 2.6911444554673646193e-75 },
{ 10 , 1e-06 , 2.6911444554673096152e-70 },
{ 10 , 3.16227766016838e-06 , 2.6911444554667591352e-65 },
{ 10 , 1e-05 , 2.6911444554612582946e-60 },
{ 10 , 3.16227766016838e-05 , 2.6911444554062112799e-55 },
{ 10 , 1e-04 , 2.6911444548557502587e-50 },
{ 10 , 0.000316227766016838 , 2.691144449351135505e-45 },
{ 10 , 0.001 , 2.6911443943049990395e-40 },
{ 10 , 0.00316227766016838 , 2.6911438438436965201e-35 },
{ 10 , 0.01 , 2.6911383392363445476e-30 },
{ 10 , 0.0316227766016838 , 2.691083293730485964e-25 },
{ 10 , 0.1 , 2.6905328954342172306e-20 },
{ 10 , 0.316227766016838 , 2.6850345850670040022e-15 },
{ 10 , 1 , 2.630615123687452921e-10 },
{ 10 , 2 , 2.5153862827167368199e-07 },
{ 10 , 3 , 1.292835164571588302e-05 },
{ 10 , 4 , 0.00019504055466003448463 },
{ 10 , 5 , 0.0014678026473104743583 },
{ 10 , 6 , 0.0069639810027903158857 },
{ 10 , 7 , 0.023539344388267140901 },
{ 10 , 7.5 , 0.038998257889412211996 },
{ 10 , 7.6 , 0.042818673234280582585 },
{ 10 , 7.7 , 0.04690017276527555512 },
{ 10 , 7.8 , 0.051248883025765065713 },
{ 10 , 7.9 , 0.055869872504109699407 },
{ 10 , 8 , 0.060767026774251164944 },
{ 10 , 8.1 , 0.065942923604934144954 },
{ 10 , 8.2 , 0.071398709153595626975 },
{ 10 , 8.3 , 0.077133976423868738648 },
{ 10 , 8.4 , 0.083146647220432454151 },
{ 10 , 8.5 , 0.089432858880587384753 },
{ 10 , 9 , 0.12469409282831672714 },
{ 10 , 10 , 0.20748610663335886883 },
{ 10 , 11 , 0.28042823052537591 },
{ 10 , 12 , 0.300476035271269315 },
{ 10 , 13 , 0.23378201020301889179 },
{ 10 , 14 , 0.085006705446061009424 },
{ 10 , 15 , -0.09007181104765905888 },
{ 10 , 16 , -0.20620569442259728543 },
{ 10 , 17 , -0.19911331972770593413 },
{ 10 , 18 , -0.073169659187521246535 },
{ 10 , 19 , 0.091553331622639774756 },
{ 10 , 20 , 0.18648255802394508862 },
{ 10 , 30 , -0.129876893998588816 },
{ 10 , 100 , -0.054732176935472012791 },
{ 10 , 300 , 0.027563483890691235778 },
{ 10 , 1000 , -0.02452062230603655954 },
{ 30 , 1e-08 , 3.511074584737334481e-282 },
{ 30 , 3.16227766016838e-08 , 3.5110745847373276748e-267 },
{ 30 , 1e-07 , 3.5110745847373271436e-252 },
{ 30 , 3.16227766016838e-07 , 3.5110745847373244839e-237 },
{ 30 , 1e-06 , 3.5110745847372989351e-222 },
{ 30 , 3.16227766016838e-06 , 3.511074584737044636e-207 },
{ 30 , 1e-05 , 3.5110745847345094386e-192 },
{ 30 , 3.16227766016838e-05 , 3.5110745847090235522e-177 },
{ 30 , 1e-04 , 3.5110745844541855471e-162 },
{ 30 , 0.000316227766016838 , 3.5110745819058229075e-147 },
{ 30 , 0.001 , 3.5110745564222159037e-132 },
{ 30 , 0.00316227766016838 , 3.5110743015861690319e-117 },
{ 30 , 0.01 , 3.5110717532266786188e-102 },
{ 30 , 0.0316227766016838 , 3.5110462697303107185e-87 },
{ 30 , 0.1 , 3.5107914446214635799e-72 },
{ 30 , 0.316227766016838 , 3.5082441787554764315e-57 },
{ 30 , 1 , 3.4828697942514824077e-42 },
{ 30 , 2 , 3.6502562664740960186e-33 },
{ 30 , 3 , 6.7223399381463293316e-28 },
{ 30 , 4 , 3.5570357020361055268e-24 },
{ 30 , 5 , 2.6711772782507989195e-21 },
{ 30 , 6 , 5.7984683652785706951e-19 },
{ 30 , 7 , 5.3172607940100176027e-17 },
{ 30 , 7.5 , 3.9705139492720914996e-16 },
{ 30 , 7.6 , 5.8351206236969734897e-16 },
{ 30 , 7.7 , 8.5295046954365007979e-16 },
{ 30 , 7.8 , 1.240300099862031423e-15 },
{ 30 , 7.9 , 1.7943809060373146352e-15 },
{ 30 , 8 , 2.5830997825663086363e-15 },
{ 30 , 8.1 , 3.7004810818946501642e-15 },
{ 30 , 8.2 , 5.2761304350589830578e-15 },
{ 30 , 8.3 , 7.4879207291538333461e-15 },
{ 30 , 8.4 , 1.057892772982890842e-14 },
{ 30 , 8.5 , 1.4879948521285087748e-14 },
{ 30 , 9 , 7.6921564693354977569e-14 },
{ 30 , 10 , 1.5510960782574666161e-12 },
{ 30 , 11 , 2.2735383676316185421e-11 },
{ 30 , 12 , 2.5522590430344176732e-10 },
{ 30 , 13 , 2.2828783239868354402e-09 },
{ 30 , 14 , 1.6775399533577877891e-08 },
{ 30 , 15 , 1.0374710201078721135e-07 },
{ 30 , 16 , 5.5052386643076382366e-07 },
{ 30 , 17 , 2.5460065118711982301e-06 },
{ 30 , 18 , 1.0393652487465728599e-05 },
{ 30 , 19 , 3.7849142225173515583e-05 },
{ 30 , 20 , 0.00012401536360354329497 },
{ 30 , 30 , 0.14393585001030734238 },
{ 30 , 100 , 0.081460129581172213697 },
{ 30 , 300 , -0.029514887800373371812 },
{ 30 , 1000 , -0.020271896981075843147 },
{ 100 , 1e-08 , 0 },
{ 100 , 3.16227766016838e-08 , 0 },
{ 100 , 1e-07 , 0 },
{ 100 , 3.16227766016838e-07 , 0 },
{ 100 , 1e-06 , 0 },
{ 100 , 3.16227766016838e-06 , 0 },
{ 100 , 1e-05 , 0 },
{ 100 , 3.16227766016838e-05 , 0 },
{ 100 , 1e-04 , 0 },
{ 100 , 0.000316227766016838 , 0 },
{ 100 , 0.001 , 0 },
{ 100 , 0.00316227766016838 , 0 },
{ 100 , 0.01 , 0 },
{ 100 , 0.0316227766016838 , 0 },
{ 100 , 0.1 , 8.4525165351217888791e-289 },
{ 100 , 0.316227766016838 , 8.4506337559752745816e-239 },
{ 100 , 1 , 8.4318287896267070128e-189 },
{ 100 , 2 , 1.0609531124391718917e-158 },
{ 100 , 3 , 4.260360181132621405e-141 },
{ 100 , 4 , 1.305547836452271925e-128 },
{ 100 , 5 , 6.2677893955418752099e-119 },
{ 100 , 6 , 5.0513258541507019365e-111 },
{ 100 , 7 , 2.4215591572118171706e-104 },
{ 100 , 7.5 , 2.3583800455568589368e-101 },
{ 100 , 7.6 , 8.8352979458474109476e-101 },
{ 100 , 7.7 , 3.253025120751429903e-100 },
{ 100 , 7.8 , 1.1776236102157393805e-99 },
{ 100 , 7.9 , 4.1933885427120016432e-99 },
{ 100 , 8 , 1.4694094093552327336e-98 },
{ 100 , 8.1 , 5.0688862671208964077e-98 },
{ 100 , 8.2 , 1.7220304874625643909e-97 },
{ 100 , 8.3 , 5.7635248300942440709e-97 },
{ 100 , 8.4 , 1.9011188242236321325e-96 },
{ 100 , 8.5 , 6.182346491260611201e-96 },
{ 100 , 9 , 1.8369106342703587456e-93 },
{ 100 , 10 , 6.5973160641553802341e-89 },
{ 100 , 11 , 8.6297901331738815878e-85 },
{ 100 , 12 , 4.8983704457507876536e-81 },
{ 100 , 13 , 1.3781127544328333402e-77 },
{ 100 , 14 , 2.1310751903146119988e-74 },
{ 100 , 15 , 1.9660095611249536378e-71 },
{ 100 , 16 , 1.1559435724349575529e-68 },
{ 100 , 17 , 4.5721265690179434188e-66 },
{ 100 , 18 , 1.2722370655682102766e-63 },
{ 100 , 19 , 2.5856336302772506687e-61 },
{ 100 , 20 , 3.9617550943362506795e-59 },
{ 100 , 30 , 4.5788015281752424119e-42 },
{ 100 , 100 , 0.09636667329586150188 },
{ 100 , 300 , -0.014491227064785699996 },
{ 100 , 1000 , 0.011676135007802557891 },
{ 300 , 1e-08 , 0 },
{ 300 , 3.16227766016838e-08 , 0 },
{ 300 , 1e-07 , 0 },
{ 300 , 3.16227766016838e-07 , 0 },
{ 300 , 1e-06 , 0 },
{ 300 , 3.16227766016838e-06 , 0 },
{ 300 , 1e-05 , 0 },
{ 300 , 3.16227766016838e-05 , 0 },
{ 300 , 1e-04 , 0 },
{ 300 , 0.000316227766016838 , 0 },
{ 300 , 0.001 , 0 },
{ 300 , 0.00316227766016838 , 0 },
{ 300 , 0.01 , 0 },
{ 300 , 0.0316227766016838 , 0 },
{ 300 , 0.1 , 0 },
{ 300 , 0.316227766016838 , 0 },
{ 300 , 1 , 0 },
{ 300 , 2 , 0 },
{ 300 , 3 , 0 },
{ 300 , 4 , 0 },
{ 300 , 5 , 0 },
{ 300 , 6 , 0 },
{ 300 , 7 , 0 },
{ 300 , 7.5 , 0 },
{ 300 , 7.6 , 0 },
{ 300 , 7.7 , 0 },
{ 300 , 7.8 , 0 },
{ 300 , 7.9 , 0 },
{ 300 , 8 , 0 },
{ 300 , 8.1 , 0 },
{ 300 , 8.2 , 0 },
{ 300 , 8.3 , 0 },
{ 300 , 8.4 , 0 },
{ 300 , 8.5 , 0 },
{ 300 , 9 , 0 },
{ 300 , 10 , 0 },
{ 300 , 11 , 0 },
{ 300 , 12 , 0 },
{ 300 , 13 , 0 },
{ 300 , 14 , 0 },
{ 300 , 15 , 0 },
{ 300 , 16 , 0 },
{ 300 , 17 , 0 },
{ 300 , 18 , 0 },
{ 300 , 19 , 0 },
{ 300 , 20 , 0 },
{ 300 , 30 , 1.0388021531643495593e-262 },
{ 300 , 100 , 3.5203666218469330448e-109 },
{ 300 , 300 , 0.066818398128979980544 },
{ 300 , 1000 , 0.00046782803879124944908 }
};
/**
 * Checks {@code BesselJ.value(order, x)} against the precomputed reference
 * table {@code BESSEL_J_REF}, where each row is {order, x, expected value}.
 * Values are compared with an absolute tolerance of 1e-15.
 */
@Test
public void testBesselJ() {
    final double tol = 1e-15;
    for (final double[] row : BESSEL_J_REF) {
        final double order = row[0];
        final double x = row[1];
        final double expected = row[2];
        // Assertion message identifies the failing (order, x) pair.
        Assert.assertEquals("" + order + " @ " + x, expected, BesselJ.value(order, x), tol);
    }
}
// A negative order is rejected by BesselJ.value with a
// MathIllegalArgumentException (presumably only non-negative orders are
// supported — confirm against the BesselJ implementation).
@Test(expected=MathIllegalArgumentException.class)
public void testIAEBadOrder() {
    BesselJ.value(-1, 1);
}
// An argument of 100000 triggers a MathIllegalArgumentException
// (presumably x exceeds the range the underlying algorithm can evaluate —
// confirm against the BesselJ implementation).
@Test(expected=MathIllegalArgumentException.class)
public void testIAEBadArgument() {
    BesselJ.value(1, 100000);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.