repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-cloud-java | 36,046 | java-orchestration-airflow/proto-google-cloud-orchestration-airflow-v1/src/main/java/com/google/cloud/orchestration/airflow/service/v1/CreateUserWorkloadsConfigMapRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/orchestration/airflow/service/v1/environments.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.orchestration.airflow.service.v1;
/**
*
*
* <pre>
* Create user workloads ConfigMap request.
* </pre>
*
* Protobuf type {@code
* google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest}
*/
public final class CreateUserWorkloadsConfigMapRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest)
CreateUserWorkloadsConfigMapRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateUserWorkloadsConfigMapRequest.newBuilder() to construct.
private CreateUserWorkloadsConfigMapRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateUserWorkloadsConfigMapRequest() {
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateUserWorkloadsConfigMapRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsConfigMapRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsConfigMapRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
.class,
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The environment name to create a ConfigMap for, in the form:
* "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The environment name to create a ConfigMap for, in the form:
* "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int USER_WORKLOADS_CONFIG_MAP_FIELD_NUMBER = 2;
private com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
userWorkloadsConfigMap_;
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the userWorkloadsConfigMap field is set.
*/
@java.lang.Override
public boolean hasUserWorkloadsConfigMap() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The userWorkloadsConfigMap.
*/
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
getUserWorkloadsConfigMap() {
return userWorkloadsConfigMap_ == null
? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
.getDefaultInstance()
: userWorkloadsConfigMap_;
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMapOrBuilder
getUserWorkloadsConfigMapOrBuilder() {
return userWorkloadsConfigMap_ == null
? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
.getDefaultInstance()
: userWorkloadsConfigMap_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getUserWorkloadsConfigMap());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUserWorkloadsConfigMap());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest)) {
return super.equals(obj);
}
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest other =
(com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasUserWorkloadsConfigMap() != other.hasUserWorkloadsConfigMap()) return false;
if (hasUserWorkloadsConfigMap()) {
if (!getUserWorkloadsConfigMap().equals(other.getUserWorkloadsConfigMap())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasUserWorkloadsConfigMap()) {
hash = (37 * hash) + USER_WORKLOADS_CONFIG_MAP_FIELD_NUMBER;
hash = (53 * hash) + getUserWorkloadsConfigMap().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Create user workloads ConfigMap request.
* </pre>
*
* Protobuf type {@code
* google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest)
com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsConfigMapRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsConfigMapRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
.class,
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
.Builder.class);
}
// Construct using
// com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUserWorkloadsConfigMapFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
userWorkloadsConfigMap_ = null;
if (userWorkloadsConfigMapBuilder_ != null) {
userWorkloadsConfigMapBuilder_.dispose();
userWorkloadsConfigMapBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_CreateUserWorkloadsConfigMapRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
getDefaultInstanceForType() {
return com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
build() {
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
buildPartial() {
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest result =
new com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest(
this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.userWorkloadsConfigMap_ =
userWorkloadsConfigMapBuilder_ == null
? userWorkloadsConfigMap_
: userWorkloadsConfigMapBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest) {
return mergeFrom(
(com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest)
other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
other) {
if (other
== com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasUserWorkloadsConfigMap()) {
mergeUserWorkloadsConfigMap(other.getUserWorkloadsConfigMap());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(
getUserWorkloadsConfigMapFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The environment name to create a ConfigMap for, in the form:
* "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The environment name to create a ConfigMap for, in the form:
* "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The environment name to create a ConfigMap for, in the form:
* "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The environment name to create a ConfigMap for, in the form:
* "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The environment name to create a ConfigMap for, in the form:
* "projects/{projectId}/locations/{locationId}/environments/{environmentId}"
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
userWorkloadsConfigMap_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap,
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap.Builder,
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMapOrBuilder>
userWorkloadsConfigMapBuilder_;
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the userWorkloadsConfigMap field is set.
*/
public boolean hasUserWorkloadsConfigMap() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The userWorkloadsConfigMap.
*/
public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
getUserWorkloadsConfigMap() {
if (userWorkloadsConfigMapBuilder_ == null) {
return userWorkloadsConfigMap_ == null
? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
.getDefaultInstance()
: userWorkloadsConfigMap_;
} else {
return userWorkloadsConfigMapBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUserWorkloadsConfigMap(
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap value) {
if (userWorkloadsConfigMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
userWorkloadsConfigMap_ = value;
} else {
userWorkloadsConfigMapBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUserWorkloadsConfigMap(
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap.Builder
builderForValue) {
if (userWorkloadsConfigMapBuilder_ == null) {
userWorkloadsConfigMap_ = builderForValue.build();
} else {
userWorkloadsConfigMapBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUserWorkloadsConfigMap(
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap value) {
if (userWorkloadsConfigMapBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& userWorkloadsConfigMap_ != null
&& userWorkloadsConfigMap_
!= com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
.getDefaultInstance()) {
getUserWorkloadsConfigMapBuilder().mergeFrom(value);
} else {
userWorkloadsConfigMap_ = value;
}
} else {
userWorkloadsConfigMapBuilder_.mergeFrom(value);
}
if (userWorkloadsConfigMap_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUserWorkloadsConfigMap() {
bitField0_ = (bitField0_ & ~0x00000002);
userWorkloadsConfigMap_ = null;
if (userWorkloadsConfigMapBuilder_ != null) {
userWorkloadsConfigMapBuilder_.dispose();
userWorkloadsConfigMapBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap.Builder
getUserWorkloadsConfigMapBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUserWorkloadsConfigMapFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMapOrBuilder
getUserWorkloadsConfigMapOrBuilder() {
if (userWorkloadsConfigMapBuilder_ != null) {
return userWorkloadsConfigMapBuilder_.getMessageOrBuilder();
} else {
return userWorkloadsConfigMap_ == null
? com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap
.getDefaultInstance()
: userWorkloadsConfigMap_;
}
}
/**
*
*
* <pre>
* Required. User workloads ConfigMap to create.
* </pre>
*
* <code>
* .google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap,
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap.Builder,
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMapOrBuilder>
getUserWorkloadsConfigMapFieldBuilder() {
if (userWorkloadsConfigMapBuilder_ == null) {
userWorkloadsConfigMapBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap,
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMap.Builder,
com.google.cloud.orchestration.airflow.service.v1.UserWorkloadsConfigMapOrBuilder>(
getUserWorkloadsConfigMap(), getParentForChildren(), isClean());
userWorkloadsConfigMap_ = null;
}
return userWorkloadsConfigMapBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest)
private static final com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest();
}
public static com.google.cloud.orchestration.airflow.service.v1
.CreateUserWorkloadsConfigMapRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateUserWorkloadsConfigMapRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateUserWorkloadsConfigMapRequest>() {
@java.lang.Override
public CreateUserWorkloadsConfigMapRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateUserWorkloadsConfigMapRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateUserWorkloadsConfigMapRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.CreateUserWorkloadsConfigMapRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/xmlbeans | 36,062 | src/main/java/org/apache/xmlbeans/GDate.java | /* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xmlbeans;
import org.apache.xmlbeans.impl.util.ExceptionUtil;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
/**
* Represents an XML Schema-compatible Gregorian date.
* <p>
* There are many date types in XML Schema, and this type
* represents the natural union of all those types. A GDate
* can hold any subset of date fields (Year, Month, Day, Time,
* Timezone, or some combination). Wherever the specification
* provides guidance, the guidelines in the
* <a target="_blank" href="http://www.w3.org/TR/xmlschema-2/">XML Schema 1.0 specification</a>
* (plus <a target="_blank" href="http://www.w3.org/2001/05/xmlschema-errata">published errata</a>) are followed.
* <p>
* Instances may separately have values or no values for
* the year, month, day-of-month, and time-of-day. Not all
* operations are meaningful on all combinations.
*/
public final class GDate implements GDateSpecification, java.io.Serializable {
    private static final long serialVersionUID = 1L;
    // XMLSchema spec requires support only for years 1 to 9999, but XMLBeans covers more up to the following limitations
    // to avoid losing precision when transforming to a java.util.Date
    static final int MAX_YEAR = 292277265; // is Long.MAX_VALUE ms in years - 1 (for the 11month, 31days, 23h, 59m, 59sec case).
    static final int MIN_YEAR = -292275295; // is Long.MIN_VALUE ms in years + 1970 + 1
    // for fast equality comparison, hashing, and serialization
    private transient String _canonicalString;
    private transient String _string;
    // Bitmask of HAS_TIMEZONE/HAS_YEAR/HAS_MONTH/HAS_DAY/HAS_TIME flags telling
    // which of the fields below actually carry information.
    private int _bits;
    private int _CY;          // year (may be negative; never zero when set)
    private int _M;           // month-of-year, 1..12
    private int _D;           // day-of-month, 1..31
    private int _h;           // hour-of-day, 0..23 (24 is normalized away in the ctor)
    private int _m;           // minute, 0..59
    private int _s;           // second, 0..59
    private BigDecimal _fs;   // fraction-of-second, [0, 1)
    private int _tzsign;      // -1, 0, or +1; sign of the timezone offset
    private int _tzh;         // timezone hour magnitude (always non-negative)
    private int _tzm;         // timezone minute magnitude (always non-negative)
    /* package */ static final BigDecimal _zero = BigDecimal.ZERO;
    /* package */ static final BigDecimal _one = BigDecimal.ONE;
    /**
     * Constructs a GDate based on a lexical representation.
     */
    public GDate(CharSequence string) {
        // first trim XML whitespace
        int len = string.length();
        int start = 0;
        while (len > 0 && isSpace(string.charAt(len - 1))) {
            len -= 1;
        }
        while (start < len && isSpace(string.charAt(start))) {
            start += 1;
        }
        // pick optional timezone off the end
        if (len - start >= 1 && string.charAt(len - 1) == 'Z') {
            _bits |= HAS_TIMEZONE;
            len -= 1;
        } else if (len - start >= 6) {
            // try to match a trailing "+hh:mm" / "-hh:mm"; fall through (no timezone)
            // if the fixed layout does not match
            timezone:
            {
                int tzsign;
                int tzhour;
                int tzminute;
                if (string.charAt(len - 3) != ':') {
                    break timezone;
                }
                switch (string.charAt(len - 6)) {
                    case '-':
                        tzsign = -1;
                        break;
                    case '+':
                        tzsign = 1;
                        break;
                    default:
                        break timezone;
                }
                tzhour = twoDigit(string, len - 5);
                tzminute = twoDigit(string, len - 2);
                if (tzhour > 14) {
                    throw new IllegalArgumentException("time zone hour must be two digits between -14 and +14");
                }
                if (tzminute > 59) {
                    throw new IllegalArgumentException("time zone minute must be two digits between 00 and 59");
                }
                _bits |= HAS_TIMEZONE;
                _tzsign = tzsign;
                _tzh = tzhour;
                _tzm = tzminute;
                len -= 6;
            }
        }
        // pick date fields off the beginning if it doesn't look like a time
        if (start < len && (start + 2 >= len || string.charAt(start + 2) != ':')) {
            scandate:
            {
                // parse year sign
                boolean negyear = false;
                if (string.charAt(start) == '-') {
                    negyear = true;
                    start += 1;
                }
                // scan year digits
                int value = 0;
                // trick: digits starts at -start, so after the scan loop advances
                // start, (digits + start) is the number of digits consumed
                int digits = -start;
                char ch;
                boolean startsWithZero = start < len && digitVal(string.charAt(start)) == 0;
                for (; ; ) {
                    ch = start < len ? string.charAt(start) : '\0';
                    if (!isDigit(ch)) {
                        break;
                    }
                    // a zero-padded year must be exactly four digits (e.g. "0560")
                    if (startsWithZero && start + digits >= 4) {
                        throw new IllegalArgumentException("year value starting with zero must be 4 or less digits: " + string);
                    }
                    value = value * 10 + digitVal(ch);
                    start += 1;
                }
                digits += start;
                if (digits > 9) {
                    throw new IllegalArgumentException("year too long (up to 9 digits)");
                } else if (digits >= 4) {
                    _bits |= HAS_YEAR;
                    _CY = negyear ? -value : value;
                    if (_CY == 0) {
                        throw new IllegalArgumentException("year must not be zero");
                    }
                } else if (digits > 0) {
                    throw new IllegalArgumentException("year must be four digits (may pad with zeroes, e.g., 0560)");
                }
                if (_CY > MAX_YEAR) {
                    throw new IllegalArgumentException("year value not supported: too big, must be less than " + MAX_YEAR);
                }
                if (_CY < MIN_YEAR) {
                    throw new IllegalArgumentException("year values not supported: too small, must be bigger than " + MIN_YEAR);
                }
                // hyphen introduces a month
                if (ch != '-') {
                    if (negyear && !hasYear()) {
                        throw new IllegalArgumentException(); // a single minus
                    } else {
                        break scandate;
                    }
                }
                start += 1;
                // two-digit month
                if (len - start >= 2) {
                    value = twoDigit(string, start);
                    if (value >= 1 && value <= 12) {
                        _bits |= HAS_MONTH;
                        _M = value;
                        start += 2;
                    }
                }
                // hyphen introduces a day
                ch = start < len ? string.charAt(start) : '\0';
                if (ch != '-') {
                    if (!hasMonth()) {
                        throw new IllegalArgumentException(); // minus after a year
                    } else {
                        break scandate;
                    }
                }
                start += 1;
                // two-digit day
                if (len - start >= 2) {
                    value = twoDigit(string, start);
                    if (value >= 1 && value <= 31) {
                        _bits |= HAS_DAY;
                        _D = value;
                        start += 2;
                    }
                }
                if (!hasDay()) {
                    // error in the original schema spec permits an extra '-' here
                    if (hasMonth() && !hasYear()) {
                        ch = start < len ? string.charAt(start) : '\0';
                        if (ch == '-') {
                            start += 1;
                            break scandate;
                        }
                    }
                    throw new IllegalArgumentException(); // minus after a month
                }
            }
        }
        // time: fixed "hh:mm:ss" layout, optionally followed by ".fraction"
        if (start < len) {
            if (hasYear() || hasMonth() || hasDay()) {
                if (string.charAt(start) != 'T') {
                    throw new IllegalArgumentException("date and time must be separated by 'T'");
                }
                start += 1;
            }
            if (len < start + 8 || string.charAt(start + 2) != ':' || string.charAt(start + 5) != ':') {
                throw new IllegalArgumentException();
            }
            int h = twoDigit(string, start);
            // 24 is accepted here and normalized to 00:00:00 of the next day below
            if (h > 24) {
                throw new IllegalArgumentException("hour must be between 00 and 23");
            }
            int m = twoDigit(string, start + 3);
            if (m >= 60) {
                throw new IllegalArgumentException("minute must be between 00 and 59");
            }
            int s = twoDigit(string, start + 6);
            if (s >= 60) {
                throw new IllegalArgumentException("second must be between 00 and 59");
            }
            start += 8;
            BigDecimal fs = _zero;
            if (start < len) {
                if (string.charAt(start) != '.') {
                    throw new IllegalArgumentException();
                }
                if (start + 1 < len) {
                    // validate the fraction digits by hand before handing the
                    // substring (including the '.') to BigDecimal
                    for (int i = start + 1; i < len; i++) {
                        if (!isDigit(string.charAt(i))) {
                            throw new IllegalArgumentException();
                        }
                    }
                    try {
                        fs = new BigDecimal(string.subSequence(start, len).toString());
                    } catch (Throwable e) {
                        if (ExceptionUtil.isFatal(e)) {
                            ExceptionUtil.rethrow(e);
                        }
                        throw new IllegalArgumentException();
                    }
                }
            }
            _bits |= HAS_TIME;
            _h = h;
            _m = m;
            _s = s;
            _fs = fs;
        }
        if (hasTime() && _h == 24) {
            if (_m != 0 || _s != 0 || _fs.compareTo(_zero) != 0) {
                throw new IllegalArgumentException("if hour is 24, minutes, seconds and fraction must be 0");
            } else { // normalize to next day if it has date or at least has day
                if (hasDate()) {
                    GDateBuilder gdb = new GDateBuilder(_CY, _M, _D, _h, _m, _s, _fs, _tzsign, _tzh, _tzm);
                    gdb.normalize24h();
                    _D = gdb.getDay();
                    _M = gdb.getMonth();
                    _CY = gdb.getYear();
                    _h = 0;
                } else if (hasDay()) // if no date only days increment
                {
                    _D++;
                    _h = 0;
                }
            }
        }
        if (!isValid()) {
            throw new IllegalArgumentException("invalid date");
        }
    }
    /**
     * Constructs a GDate with the specified year, month, day,
     * hours, minutes, seconds, and optional fractional seconds, in
     * an unspecified timezone.
     * <p>
     * Note that by not specifying the timezone the GDate
     * becomes partially unordered with respect to times that
     * do have a specified timezone.
     */
    public GDate(
        int year,
        int month,
        int day,
        int hour,
        int minute,
        int second,
        BigDecimal fraction) {
        _bits = HAS_YEAR | HAS_MONTH | HAS_DAY | HAS_TIME;
        _CY = year;
        _M = month;
        _D = day;
        _h = hour;
        _m = minute;
        _s = second;
        _fs = fraction == null ? _zero : fraction;
        if (!isValid()) {
            throw new IllegalArgumentException();
        }
    }
    /**
     * Constructs an absolute GDate with the specified year,
     * month, day, hours, minutes, seconds, and optional fractional
     * seconds, and in the timezone specified.
     * <p>
     * If you wish to have a time or date that isn't in a specified timezone,
     * then use the constructor that does not include the timezone arguments.
     */
    public GDate(
        int year,
        int month,
        int day,
        int hour,
        int minute,
        int second,
        BigDecimal fraction,
        int tzSign,
        int tzHour,
        int tzMinute) {
        _bits = HAS_TIMEZONE | HAS_YEAR | HAS_MONTH | HAS_DAY | HAS_TIME;
        _CY = year;
        _M = month;
        _D = day;
        _h = hour;
        _m = minute;
        _s = second;
        _fs = fraction == null ? _zero : fraction;
        _tzsign = tzSign;
        _tzh = tzHour;
        _tzm = tzMinute;
        if (!isValid()) {
            throw new IllegalArgumentException();
        }
    }
    /**
     * Constructs a GDate based on a java.util.Date.
     * <p>
     * The current offset of the default timezone is used as the timezone.
     * <p>
     * For example, if eastern daylight time is in effect at the given
     * date, the timezone on the east coast of the United States
     * translates to GMT-05:00 (EST) + 1:00 (DT offset) == GMT-04:00.
     */
    public GDate(Date date) {
        // requires some date math, so ctor lives on GDateBuilder
        this(new GDateBuilder(date));
    }
    /**
     * Constructs a GDate based on a java.util.Calendar.
     * <p>
     * If the calendar does not have some fields set, the same absence
     * of information is reflected in the GDate. Note that
     * java.util.GregorianCalendar fills in all fields as soon as any
     * are fetched, so constructing a GDate with the same calendar object
     * twice may result in a different GDate because of a changed calendar.
     * Note that org.apache.xmlbeans.XmlCalendar is stable if you re-get a set field,
     * so it does not have the same problem.
     */
    public GDate(Calendar calendar) {
        // we must scrape the "isSet" information out before accessing anything
        boolean isSetYear = calendar.isSet(Calendar.YEAR);
        boolean isSetEra = calendar.isSet(Calendar.ERA);
        boolean isSetMonth = calendar.isSet(Calendar.MONTH);
        boolean isSetDay = calendar.isSet(Calendar.DAY_OF_MONTH);
        boolean isSetHourOfDay = calendar.isSet(Calendar.HOUR_OF_DAY);
        boolean isSetHour = calendar.isSet(Calendar.HOUR);
        boolean isSetAmPm = calendar.isSet(Calendar.AM_PM);
        boolean isSetMinute = calendar.isSet(Calendar.MINUTE);
        boolean isSetSecond = calendar.isSet(Calendar.SECOND);
        boolean isSetMillis = calendar.isSet(Calendar.MILLISECOND);
        boolean isSetZone = calendar.isSet(Calendar.ZONE_OFFSET);
        boolean isSetDst = calendar.isSet(Calendar.DST_OFFSET);
        if (isSetYear) {
            int y = calendar.get(Calendar.YEAR);
            if (isSetEra && calendar instanceof GregorianCalendar) {
                if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) {
                    y = -y; //1 - y;
                }
            }
            _bits |= HAS_YEAR;
            _CY = y;
        }
        if (isSetMonth) {
            _bits |= HAS_MONTH;
            _M = calendar.get(Calendar.MONTH) + 1; // !!note Calendar months are 0-based, GDate months are 1-based
        }
        if (isSetDay) {
            _bits |= HAS_DAY;
            _D = calendar.get(Calendar.DAY_OF_MONTH);
        }
        boolean gotTime = false;
        int h = 0;
        int m = 0;
        int s = 0;
        BigDecimal fs = _zero;
        if (isSetHourOfDay) {
            h = calendar.get(Calendar.HOUR_OF_DAY);
            gotTime = true;
        } else if (isSetHour && isSetAmPm) {
            h = calendar.get(Calendar.HOUR) + calendar.get(Calendar.AM_PM) * 12;
            gotTime = true;
        }
        if (isSetMinute) {
            m = calendar.get(Calendar.MINUTE);
            gotTime = true;
        }
        if (isSetSecond) {
            s = calendar.get(Calendar.SECOND);
            gotTime = true;
        }
        if (isSetMillis) {
            fs = BigDecimal.valueOf(calendar.get(Calendar.MILLISECOND), 3);
            gotTime = true;
        }
        if (gotTime) {
            _bits |= HAS_TIME;
            _h = h;
            _m = m;
            _s = s;
            _fs = fs;
        }
        if (isSetZone) {
            int zoneOffsetInMilliseconds = calendar.get(Calendar.ZONE_OFFSET);
            if (isSetDst) {
                zoneOffsetInMilliseconds += calendar.get(Calendar.DST_OFFSET);
            }
            _bits |= HAS_TIMEZONE;
            if (zoneOffsetInMilliseconds == 0) {
                _tzsign = 0;
                _tzh = 0;
                _tzm = 0;
                // a zero offset may still have come from an explicit "GMT+00:00"
                // or "GMT-00:00" id; preserve its sign from the zone id string
                TimeZone zone = calendar.getTimeZone();
                String id = zone.getID();
                if (id != null && id.length() > 3) {
                    switch (id.charAt(3)) {
                        case '+':
                            _tzsign = 1;
                            break; // GMT+00:00
                        case '-':
                            _tzsign = -1;
                            break; // GMT-00:00
                    }
                }
            } else {
                _tzsign = (zoneOffsetInMilliseconds < 0 ? -1 : +1);
                zoneOffsetInMilliseconds = zoneOffsetInMilliseconds * _tzsign;
                _tzh = zoneOffsetInMilliseconds / 3600000;
                _tzm = (zoneOffsetInMilliseconds - _tzh * 3600000) / 60000;
            }
        }
    }
    /**
     * Constructs a GDate based on another GDateSpecification.
     */
    public GDate(GDateSpecification gdate) {
        if (gdate.hasTimeZone()) {
            _bits |= HAS_TIMEZONE;
            _tzsign = gdate.getTimeZoneSign();
            _tzh = gdate.getTimeZoneHour();
            _tzm = gdate.getTimeZoneMinute();
        }
        if (gdate.hasTime()) {
            _bits |= HAS_TIME;
            _h = gdate.getHour();
            _m = gdate.getMinute();
            _s = gdate.getSecond();
            _fs = gdate.getFraction();
        }
        if (gdate.hasDay()) {
            _bits |= HAS_DAY;
            _D = gdate.getDay();
        }
        if (gdate.hasMonth()) {
            _bits |= HAS_MONTH;
            _M = gdate.getMonth();
        }
        if (gdate.hasYear()) {
            _bits |= HAS_YEAR;
            _CY = gdate.getYear();
        }
    }
    /**
     * True if ch is an ASCII decimal digit ('0'-'9').
     */
    /* package */
    static boolean isDigit(char ch) {
        return ((char) (ch - '0') <= '9' - '0'); // char is unsigned
    }
    /**
     * True if ch is one of the four XML whitespace characters.
     */
    /* package */
    static boolean isSpace(char ch) {
        switch (ch) {
            case ' ':
            case '\t':
            case '\r':
            case '\n':
                return true;
            default:
                return false;
        }
    }
    /**
     * Numeric value of an ASCII digit; no validation (caller checks isDigit first).
     */
    /* package */
    static int digitVal(char ch) {
        return (ch - '0');
    }
    /**
     * Parses exactly two digits at index; returns the sentinel 100 (out of
     * range for every field that uses this) when either char is not a digit.
     */
    private static int twoDigit(CharSequence str, int index) {
        char ch1 = str.charAt(index);
        char ch2 = str.charAt(index + 1);
        if (!isDigit(ch1) || !isDigit(ch2)) {
            return 100; // not two digits
        }
        return digitVal(ch1) * 10 + digitVal(ch2);
    }
    /**
     * Returns true: all GDate instances are immutable.
     */
    public final boolean isImmutable() {
        return true;
    }
    /**
     * Returns a combination of flags indicating the information
     * contained by this GDate. The five flags are
     * HAS_TIMEZONE, HAS_YEAR, HAS_MONTH, HAS_DAY, and HAS_TIME.
     */
    public int getFlags() {
        return _bits;
    }
    /**
     * True if this date/time specification specifies a timezone.
     */
    public final boolean hasTimeZone() {
        return ((_bits & HAS_TIMEZONE) != 0);
    }
    /**
     * True if this date/time specification specifies a year.
     */
    public final boolean hasYear() {
        return ((_bits & HAS_YEAR) != 0);
    }
    /**
     * True if this date/time specification specifies a month-of-year.
     */
    public final boolean hasMonth() {
        return ((_bits & HAS_MONTH) != 0);
    }
    /**
     * True if this date/time specification specifies a day-of-month.
     */
    public final boolean hasDay() {
        return ((_bits & HAS_DAY) != 0);
    }
    /**
     * True if this date/time specification specifies a time-of-day.
     */
    public final boolean hasTime() {
        return ((_bits & HAS_TIME) != 0);
    }
    /**
     * True if this date/time specification specifies a full date (year, month, day)
     */
    public final boolean hasDate() {
        return ((_bits & (HAS_DAY | HAS_MONTH | HAS_YEAR)) == (HAS_DAY | HAS_MONTH | HAS_YEAR));
    }
    /**
     * Gets the year. Should be a four-digit year specification.
     */
    public final int getYear() {
        return _CY;
    }
    /**
     * Gets the month-of-year. January is 1.
     */
    public final int getMonth() {
        return _M;
    }
    /**
     * Gets the day-of-month. The first day of each month is 1.
     */
    public final int getDay() {
        return _D;
    }
    /**
     * Gets the hour-of-day. Midnight is 0, and 11PM is 23.
     */
    public final int getHour() {
        return _h;
    }
    /**
     * Gets the minute-of-hour. Range from 0 to 59.
     */
    public final int getMinute() {
        return _m;
    }
    /**
     * Gets the second-of-minute. Range from 0 to 59.
     */
    public final int getSecond() {
        return _s;
    }
    /**
     * Gets the fraction-of-second. Range from 0 (inclusive) to 1 (exclusive).
     */
    public final BigDecimal getFraction() {
        return _fs;
    }
    /**
     * Gets the time zone sign. For time zones east of GMT,
     * this is positive; for time zones west, this is negative.
     */
    public final int getTimeZoneSign() {
        return _tzsign;
    }
    /**
     * Gets the time zone hour.
     * <p>
     * This is always positive: for the sign, look at
     * getTimeZoneSign().
     */
    public final int getTimeZoneHour() {
        return _tzh;
    }
    /**
     * Gets the time zone minutes.
     * <p>
     * This is always positive: for the sign, look at
     * getTimeZoneSign().
     */
    public final int getTimeZoneMinute() {
        return _tzm;
    }
    /**
     * Gets the millisecond value, truncated (not rounded) from the
     * fraction-of-second via RoundingMode.DOWN. Range from 0 to 999.
     */
    public int getMillisecond() {
        if (_fs == null) {
            return 0;
        }
        return _fs.setScale(3, RoundingMode.DOWN).unscaledValue().intValue();
    }
    /**
     * The canonical string representation. Specific moments or
     * times-of-day in a specified timezone are normalized to
     * UTC time to produce a canonical string form for them.
     * Other recurring time specifications keep their timezone
     * information.
     */
    public String canonicalString() {
        ensureCanonicalString();
        return _canonicalString;
    }
    /**
     * True if this GDate corresponds to a valid gregorian date value
     * in XML schema.
     */
    public boolean isValid() {
        return GDateBuilder.isValidGDate(this);
    }
    /**
     * Returns the Julian date corresponding to this Gregorian date.
     * The Julian date (JD) is a continuous count of days from
     * 1 January 4713 BC.
     */
    public int getJulianDate() {
        return GDateBuilder.julianDateForGDate(this);
    }
    /**
     * Retrieves the value of the current time as an {@link XmlCalendar}.
     * <p>
     * {@link XmlCalendar} is a subclass of {@link java.util.GregorianCalendar}
     * which is slightly customized to match XML schema date rules.
     * <p>
     * The returned {@link XmlCalendar} has only those time and date fields
     * set that are reflected in the GDate object. Because of the way the
     * {@link java.util.Calendar} contract works, any information in the isSet() vanishes
     * as soon as you view any unset field using get() methods.
     * This means that if it is important to understand which date fields
     * are set, you must call isSet() first before get().
     */
    public XmlCalendar getCalendar() {
        return new XmlCalendar(this);
    }
    /**
     * Retrieves the value of the current time as a java.util.Date
     * instance.
     */
    public Date getDate() {
        return GDateBuilder.dateForGDate(this);
    }
    /**
     * Comparison to another GDate.
     * <ul>
     * <li>Returns -1 if this < date. (less-than)
     * <li>Returns 0 if this == date. (equal)
     * <li>Returns 1 if this > date. (greater-than)
     * <li>Returns 2 if this <> date. (incomparable)
     * </ul>
     * Two instances are incomparable if they have different amounts
     * of information.
     */
    public int compareToGDate(GDateSpecification datespec) {
        return GDateBuilder.compareGDate(this, datespec);
    }
    /**
     * Returns the builtin type code for the shape of the information
     * contained in this instance, or 0 if the
     * instance doesn't contain information corresponding to a
     * Schema type.
     * <p>
     * Value will be equal to
     * {@link SchemaType#BTC_NOT_BUILTIN},
     * {@link SchemaType#BTC_G_YEAR},
     * {@link SchemaType#BTC_G_YEAR_MONTH},
     * {@link SchemaType#BTC_G_MONTH},
     * {@link SchemaType#BTC_G_MONTH_DAY},
     * {@link SchemaType#BTC_G_DAY},
     * {@link SchemaType#BTC_DATE},
     * {@link SchemaType#BTC_DATE_TIME}, or
     * {@link SchemaType#BTC_TIME}.
     */
    public int getBuiltinTypeCode() {
        return GDateBuilder.btcForFlags(_bits);
    }
    /**
     * Adds a duration to this GDate, and returns a new GDate.
     */
    public GDate add(GDurationSpecification duration) {
        GDateBuilder builder = new GDateBuilder(this);
        builder.addGDuration(duration);
        return builder.toGDate();
    }
    /**
     * Subtracts a duration from this GDate, and returns a new GDate.
     */
    public GDate subtract(GDurationSpecification duration) {
        GDateBuilder builder = new GDateBuilder(this);
        builder.subtractGDuration(duration);
        return builder.toGDate();
    }
    /**
     * GDate is an immutable class, and equality is computed based
     * on its canonical value.
     */
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof GDate)) {
            return false;
        }
        ensureCanonicalString();
        return _canonicalString.equals(((GDate) obj).canonicalString());
    }
    /**
     * Returns a hash code for this GDate.
     */
    public int hashCode() {
        ensureCanonicalString();
        return _canonicalString.hashCode();
    }
    /**
     * The canonical string representation. Specific moments or
     * times-of-day in a specified timezone are normalized to
     * UTC time to produce a canonical string form for them.
     * Other recurring time specifications keep their timezone
     * information.
     * <p>
     * Lazily computes and caches {@code _canonicalString}; safe to call
     * repeatedly (result is idempotent for this immutable instance).
     */
    private void ensureCanonicalString() {
        if (_canonicalString != null) {
            return;
        }
        // normalization is needed when a non-UTC timezone is present on a time
        // with either a full date or no date fields at all...
        boolean needNormalize =
            (hasTimeZone() && getTimeZoneSign() != 0 && hasTime() &&
             ((hasDay() == hasMonth() && hasDay() == hasYear())));
        // ...or when the fraction has trailing zeros to strip (unscaled value
        // divisible by ten)
        if (!needNormalize && getFraction() != null && getFraction().scale() > 0) {
            BigInteger bi = getFraction().unscaledValue();
            needNormalize = (bi.mod(GDateBuilder.TEN).signum() == 0);
        }
        if (!needNormalize) {
            _canonicalString = toString();
        } else {
            GDateBuilder gdb = new GDateBuilder(this);
            gdb.normalize();
            _canonicalString = gdb.toString();
        }
    }
    /**
     * The natural string representation. This represents the information
     * that is available, including timezone. For types that correspond
     * to defined schema types (schemaBuiltinTypeCode() > 0),
     * this provides the natural lexical representation.
     * <p>
     * When both time and timezone are specified, this string is not
     * the canonical representation unless the timezone is UTC (Z)
     * (since the same moment in time can be expressed in different
     * timezones). To get a canonical string, use the canonicalString()
     * method.
     */
    public String toString() {
        if (_string == null) {
            _string = formatGDate(this);
        }
        return _string;
    }
    // lookup table: tens digit character for values 0-99
    private final static char[] _tensDigit =
        {
            '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
            '1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
            '2', '2', '2', '2', '2', '2', '2', '2', '2', '2',
            '3', '3', '3', '3', '3', '3', '3', '3', '3', '3',
            '4', '4', '4', '4', '4', '4', '4', '4', '4', '4',
            '5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
            '6', '6', '6', '6', '6', '6', '6', '6', '6', '6',
            '7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
            '8', '8', '8', '8', '8', '8', '8', '8', '8', '8',
            '9', '9', '9', '9', '9', '9', '9', '9', '9', '9',
        };
    // lookup table: ones digit character for values 0-99
    private final static char[] _onesDigit =
        {
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        };
    /**
     * Writes n (0-99) as two zero-padded chars into b at offset i;
     * returns the offset just past the written chars.
     */
    private static int _padTwoAppend(char[] b, int i, int n) {
        assert (n >= 0 && n < 100);
        b[i] = _tensDigit[n];
        b[i + 1] = _onesDigit[n];
        return i + 2;
    }
    /**
     * Writes n into b at offset 0 as a (possibly negative) year, zero-padded
     * to at least four digits; returns the number of chars written.
     */
    private static int _padFourAppend(char[] b, int n) {
        int i = 0;
        if (n < 0) {
            b[i++] = '-';
            n = -n;
        }
        if (n >= 10000) {
            // five or more digits need no padding; use the decimal string as-is
            String s = Integer.toString(n);
            s.getChars(0, s.length(), b, i);
            return i + s.length();
        }
        int q = n / 100;
        int r = n - q * 100;
        b[i] = _tensDigit[q];
        b[i + 1] = _onesDigit[q];
        b[i + 2] = _tensDigit[r];
        b[i + 3] = _onesDigit[r];
        return i + 4;
    }
    private static final TimeZone GMTZONE = TimeZone.getTimeZone("GMT");
    // cached zones for whole-hour offsets GMT-00:00 .. GMT-14:00 (index = hour)
    private static final TimeZone[] MINUSZONE =
        {
            TimeZone.getTimeZone("GMT-00:00"),
            TimeZone.getTimeZone("GMT-01:00"),
            TimeZone.getTimeZone("GMT-02:00"),
            TimeZone.getTimeZone("GMT-03:00"),
            TimeZone.getTimeZone("GMT-04:00"),
            TimeZone.getTimeZone("GMT-05:00"),
            TimeZone.getTimeZone("GMT-06:00"),
            TimeZone.getTimeZone("GMT-07:00"),
            TimeZone.getTimeZone("GMT-08:00"),
            TimeZone.getTimeZone("GMT-09:00"),
            TimeZone.getTimeZone("GMT-10:00"),
            TimeZone.getTimeZone("GMT-11:00"),
            TimeZone.getTimeZone("GMT-12:00"),
            TimeZone.getTimeZone("GMT-13:00"),
            TimeZone.getTimeZone("GMT-14:00"),
        };
    // cached zones for whole-hour offsets GMT+00:00 .. GMT+14:00 (index = hour)
    private static final TimeZone[] PLUSZONE =
        {
            TimeZone.getTimeZone("GMT+00:00"),
            TimeZone.getTimeZone("GMT+01:00"),
            TimeZone.getTimeZone("GMT+02:00"),
            TimeZone.getTimeZone("GMT+03:00"),
            TimeZone.getTimeZone("GMT+04:00"),
            TimeZone.getTimeZone("GMT+05:00"),
            TimeZone.getTimeZone("GMT+06:00"),
            TimeZone.getTimeZone("GMT+07:00"),
            TimeZone.getTimeZone("GMT+08:00"),
            TimeZone.getTimeZone("GMT+09:00"),
            TimeZone.getTimeZone("GMT+10:00"),
            TimeZone.getTimeZone("GMT+11:00"),
            TimeZone.getTimeZone("GMT+12:00"),
            TimeZone.getTimeZone("GMT+13:00"),
            TimeZone.getTimeZone("GMT+14:00"),
        };
    /**
     * Maps a GDateSpecification's timezone to a java.util.TimeZone:
     * default zone when no timezone is set, GMT for a zero offset,
     * cached zones for whole-hour offsets, otherwise a freshly built
     * "GMT(+|-)hh:mm" zone.
     */
    /* package */
    static TimeZone timeZoneForGDate(GDateSpecification date) {
        // use a cached timezone if integral; otherwise make a new one.
        if (!date.hasTimeZone()) {
            return TimeZone.getDefault();
        }
        if (date.getTimeZoneSign() == 0) {
            return GMTZONE;
        }
        if (date.getTimeZoneMinute() == 0 && date.getTimeZoneHour() <= 14 && date.getTimeZoneHour() >= 0) {
            return date.getTimeZoneSign() < 0 ? MINUSZONE[date.getTimeZoneHour()] : PLUSZONE[date.getTimeZoneHour()];
        }
        char[] zb = new char[9];
        zb[0] = 'G';
        zb[1] = 'M';
        zb[2] = 'T';
        zb[3] = (date.getTimeZoneSign() < 0) ? '-' : '+';
        GDate._padTwoAppend(zb, 4, date.getTimeZoneHour());
        zb[6] = ':';
        GDate._padTwoAppend(zb, 7, date.getTimeZoneMinute());
        return TimeZone.getTimeZone(new String(zb));
    }
    /**
     * Formats whichever date/time/timezone fields the spec carries into
     * its natural lexical form (e.g. "-YYYY-MM-DDThh:mm:ss.fff+zh:zm").
     */
    /* package */
    static String formatGDate(GDateSpecification spec) {
        // We've used a char[] rather than a StringBuffer for a 4x speedup
        // -YY(10)YY-MM-DDTHH:MM:SS.FFFFFF+ZH:ZM
        // 1 + 10 + 3+ 3+ 3+ 3+ 3+1 + s + 3+ 3 = 33 + s
        BigDecimal fs = spec.getFraction();
        char[] message = new char[33 + (fs == null ? 0 : fs.scale())];
        int i = 0;
        if (spec.hasYear() || spec.hasMonth() || spec.hasDay()) {
            dmy:
            {
                if (spec.hasYear()) {
                    i = _padFourAppend(message, spec.getYear());
                } else {
                    message[i++] = '-';
                }
                if (!(spec.hasMonth() || spec.hasDay())) {
                    break dmy;
                }
                message[i++] = '-';
                if (spec.hasMonth()) {
                    i = _padTwoAppend(message, i, spec.getMonth());
                }
                if (!spec.hasDay()) {
                    break dmy;
                }
                message[i++] = '-';
                i = _padTwoAppend(message, i, spec.getDay());
            }
            if (spec.hasTime()) {
                message[i++] = 'T';
            }
        }
        if (spec.hasTime()) {
            i = _padTwoAppend(message, i, spec.getHour());
            message[i++] = ':';
            i = _padTwoAppend(message, i, spec.getMinute());
            message[i++] = ':';
            i = _padTwoAppend(message, i, spec.getSecond());
            if (fs != null && !_zero.equals(fs)) // (optimization ~3%)
            {
                // copy the fraction's ".digits" tail straight out of its
                // BigDecimal string form
                String frac = fs.toString();
                int point = frac.indexOf('.');
                if (point >= 0) {
                    frac.getChars(point, frac.length(), message, i);
                    i += frac.length() - point;
                }
            }
        }
        if (spec.hasTimeZone()) {
            if (spec.getTimeZoneSign() == 0) {
                message[i++] = 'Z';
            } else {
                message[i++] = spec.getTimeZoneSign() > 0 ? '+' : '-';
                i = _padTwoAppend(message, i, spec.getTimeZoneHour());
                message[i++] = ':';
                i = _padTwoAppend(message, i, spec.getTimeZoneMinute());
            }
        }
        // it would be nice to use (0, i, message) ctor instead
        return new String(message, 0, i);
    }
}
|
apache/iotdb | 35,985 | iotdb-core/consensus/src/main/java/org/apache/iotdb/consensus/config/RatisConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.consensus.config;
import org.apache.iotdb.commons.client.property.ClientPoolProperty.DefaultProperty;
import org.apache.ratis.grpc.GrpcConfigKeys.Server;
import org.apache.ratis.server.RaftServerConfigKeys;
import org.apache.ratis.util.SizeInBytes;
import org.apache.ratis.util.TimeDuration;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
public class RatisConfig {
private final Rpc rpc;
private final LeaderElection leaderElection;
private final Snapshot snapshot;
private final ThreadPool threadPool;
private final Log log;
private final Grpc grpc;
private final Client client;
private final Impl impl;
private final LeaderLogAppender leaderLogAppender;
private final Read read;
private final Utils utils;
  // Private: all RatisConfig instances are assembled via newBuilder()/Builder.build(),
  // which substitutes each section's default when it was left unset.
  private RatisConfig(
      Rpc rpc,
      LeaderElection leaderElection,
      Snapshot snapshot,
      ThreadPool threadPool,
      Log log,
      Grpc grpc,
      Client client,
      Impl impl,
      LeaderLogAppender leaderLogAppender,
      Read read,
      Utils utils) {
    this.rpc = rpc;
    this.leaderElection = leaderElection;
    this.snapshot = snapshot;
    this.threadPool = threadPool;
    this.log = log;
    this.grpc = grpc;
    this.client = client;
    this.impl = impl;
    this.leaderLogAppender = leaderLogAppender;
    this.read = read;
    this.utils = utils;
  }
  // Plain accessors for each configuration section; all sections are non-null
  // (Builder.build() substitutes defaults for unset ones).
  public Rpc getRpc() {
    return rpc;
  }
  public LeaderElection getLeaderElection() {
    return leaderElection;
  }
  public Snapshot getSnapshot() {
    return snapshot;
  }
  public ThreadPool getThreadPool() {
    return threadPool;
  }
  public Log getLog() {
    return log;
  }
  public Grpc getGrpc() {
    return grpc;
  }
  public Client getClient() {
    return client;
  }
  public Impl getImpl() {
    return impl;
  }
  public LeaderLogAppender getLeaderLogAppender() {
    return leaderLogAppender;
  }
  public Read getRead() {
    return read;
  }
  public Utils getUtils() {
    return utils;
  }
  /** Entry point for constructing a RatisConfig; returns a fresh, empty builder. */
  public static Builder newBuilder() {
    return new Builder();
  }
  /**
   * Builder for {@link RatisConfig}. Every section is optional; {@link #build()}
   * replaces each unset (null) section with that section's own default
   * configuration built from its builder.
   */
  public static class Builder {
    private Rpc rpc;
    private LeaderElection leaderElection;
    private Snapshot snapshot;
    private ThreadPool threadPool;
    private Log log;
    private Grpc grpc;
    private Client client;
    private Impl impl;
    private LeaderLogAppender leaderLogAppender;
    private Read read;
    private Utils utils;
    // Assemble the config, falling back to <Section>.newBuilder().build()
    // defaults for every section the caller did not set.
    public RatisConfig build() {
      return new RatisConfig(
          Optional.ofNullable(rpc).orElseGet(() -> Rpc.newBuilder().build()),
          Optional.ofNullable(leaderElection).orElseGet(() -> LeaderElection.newBuilder().build()),
          Optional.ofNullable(snapshot).orElseGet(() -> Snapshot.newBuilder().build()),
          Optional.ofNullable(threadPool).orElseGet(() -> ThreadPool.newBuilder().build()),
          Optional.ofNullable(log).orElseGet(() -> Log.newBuilder().build()),
          Optional.ofNullable(grpc).orElseGet(() -> Grpc.newBuilder().build()),
          Optional.ofNullable(client).orElseGet(() -> Client.newBuilder().build()),
          Optional.ofNullable(impl).orElseGet(() -> Impl.newBuilder().build()),
          Optional.ofNullable(leaderLogAppender)
              .orElseGet(() -> LeaderLogAppender.newBuilder().build()),
          Optional.ofNullable(read).orElseGet(() -> Read.newBuilder().build()),
          Optional.ofNullable(utils).orElseGet(() -> Utils.newBuilder().build()));
    }
    // Fluent setters: each overrides one section and returns this builder.
    public Builder setRpc(Rpc rpc) {
      this.rpc = rpc;
      return this;
    }
    public Builder setLeaderElection(LeaderElection leaderElection) {
      this.leaderElection = leaderElection;
      return this;
    }
    public Builder setSnapshot(Snapshot snapshot) {
      this.snapshot = snapshot;
      return this;
    }
    public Builder setThreadPool(ThreadPool threadPool) {
      this.threadPool = threadPool;
      return this;
    }
    public Builder setLog(Log log) {
      this.log = log;
      return this;
    }
    public Builder setGrpc(Grpc grpc) {
      this.grpc = grpc;
      return this;
    }
    public Builder setClient(Client client) {
      this.client = client;
      return this;
    }
    public Builder setImpl(Impl impl) {
      this.impl = impl;
      return this;
    }
    public Builder setLeaderLogAppender(LeaderLogAppender leaderLogAppender) {
      this.leaderLogAppender = leaderLogAppender;
      return this;
    }
    public Builder setRead(Read read) {
      this.read = read;
      return this;
    }
    public Builder setUtils(Utils utils) {
      this.utils = utils;
      return this;
    }
  }
/** server rpc timeout related. */
  /** server rpc timeout related. */
  public static class Rpc {
    private final TimeDuration timeoutMin;              // lower bound of the raft election timeout
    private final TimeDuration timeoutMax;              // upper bound of the raft election timeout
    private final TimeDuration requestTimeout;          // per-request RPC timeout
    private final TimeDuration sleepTime;               // retry/sleep interval
    private final TimeDuration slownessTimeout;         // threshold after which a peer is considered slow
    private final TimeDuration firstElectionTimeoutMin; // lower bound for the very first election
    private final TimeDuration firstElectionTimeoutMax; // upper bound for the very first election
    private Rpc(
        TimeDuration timeoutMin,
        TimeDuration timeoutMax,
        TimeDuration requestTimeout,
        TimeDuration sleepTime,
        TimeDuration slownessTimeout,
        TimeDuration firstElectionTimeoutMin,
        TimeDuration firstElectionTimeoutMax) {
      this.timeoutMin = timeoutMin;
      this.timeoutMax = timeoutMax;
      this.requestTimeout = requestTimeout;
      this.sleepTime = sleepTime;
      this.slownessTimeout = slownessTimeout;
      this.firstElectionTimeoutMin = firstElectionTimeoutMin;
      this.firstElectionTimeoutMax = firstElectionTimeoutMax;
    }
    public TimeDuration getTimeoutMin() {
      return timeoutMin;
    }
    public TimeDuration getTimeoutMax() {
      return timeoutMax;
    }
    public TimeDuration getRequestTimeout() {
      return requestTimeout;
    }
    public TimeDuration getSleepTime() {
      return sleepTime;
    }
    public TimeDuration getSlownessTimeout() {
      return slownessTimeout;
    }
    public TimeDuration getFirstElectionTimeoutMin() {
      return firstElectionTimeoutMin;
    }
    public TimeDuration getFirstElectionTimeoutMax() {
      return firstElectionTimeoutMax;
    }
    public static Rpc.Builder newBuilder() {
      return new Rpc.Builder();
    }
    /**
     * Builder for {@link Rpc}; fields are pre-initialized with the defaults
     * below (election timeout 2-4s, request timeout 20s, sleep 1s,
     * slowness 120s, first election 50-150ms).
     */
    public static class Builder {
      private TimeDuration timeoutMin = TimeDuration.valueOf(2, TimeUnit.SECONDS);
      private TimeDuration timeoutMax = TimeDuration.valueOf(4, TimeUnit.SECONDS);
      private TimeDuration requestTimeout = TimeDuration.valueOf(20, TimeUnit.SECONDS);
      private TimeDuration sleepTime = TimeDuration.valueOf(1, TimeUnit.SECONDS);
      private TimeDuration slownessTimeout = TimeDuration.valueOf(120, TimeUnit.SECONDS);
      private TimeDuration firstElectionTimeoutMin =
          TimeDuration.valueOf(50, TimeUnit.MILLISECONDS);
      private TimeDuration firstElectionTimeoutMax =
          TimeDuration.valueOf(150, TimeUnit.MILLISECONDS);
      public Rpc build() {
        return new Rpc(
            timeoutMin,
            timeoutMax,
            requestTimeout,
            sleepTime,
            slownessTimeout,
            firstElectionTimeoutMin,
            firstElectionTimeoutMax);
      }
      public Rpc.Builder setTimeoutMin(TimeDuration timeoutMin) {
        this.timeoutMin = timeoutMin;
        return this;
      }
      public Rpc.Builder setTimeoutMax(TimeDuration timeoutMax) {
        this.timeoutMax = timeoutMax;
        return this;
      }
      public Rpc.Builder setRequestTimeout(TimeDuration requestTimeout) {
        this.requestTimeout = requestTimeout;
        return this;
      }
      public Rpc.Builder setSleepTime(TimeDuration sleepTime) {
        this.sleepTime = sleepTime;
        return this;
      }
      public Rpc.Builder setSlownessTimeout(TimeDuration slownessTimeout) {
        this.slownessTimeout = slownessTimeout;
        return this;
      }
      public Rpc.Builder setFirstElectionTimeoutMax(TimeDuration firstElectionTimeoutMax) {
        this.firstElectionTimeoutMax = firstElectionTimeoutMax;
        return this;
      }
      public Rpc.Builder setFirstElectionTimeoutMin(TimeDuration firstElectionTimeoutMin) {
        this.firstElectionTimeoutMin = firstElectionTimeoutMin;
        return this;
      }
    }
  }
public static class LeaderElection {
private final TimeDuration leaderStepDownWaitTimeKey;
private final boolean preVote;
private LeaderElection(TimeDuration leaderStepDownWaitTimeKey, boolean preVote) {
this.leaderStepDownWaitTimeKey = leaderStepDownWaitTimeKey;
this.preVote = preVote;
}
public TimeDuration getLeaderStepDownWaitTimeKey() {
return leaderStepDownWaitTimeKey;
}
public boolean isPreVote() {
return preVote;
}
public static LeaderElection.Builder newBuilder() {
return new LeaderElection.Builder();
}
public static class Builder {
private TimeDuration leaderStepDownWaitTimeKey = TimeDuration.valueOf(30, TimeUnit.SECONDS);
private boolean preVote = RaftServerConfigKeys.LeaderElection.PRE_VOTE_DEFAULT;
public LeaderElection build() {
return new LeaderElection(leaderStepDownWaitTimeKey, preVote);
}
public LeaderElection.Builder setLeaderStepDownWaitTimeKey(
TimeDuration leaderStepDownWaitTimeKey) {
this.leaderStepDownWaitTimeKey = leaderStepDownWaitTimeKey;
return this;
}
public LeaderElection.Builder setPreVote(boolean preVote) {
this.preVote = preVote;
return this;
}
}
}
public static class Snapshot {
private final boolean autoTriggerEnabled;
private final long creationGap;
private final long autoTriggerThreshold;
private final int retentionFileNum;
private Snapshot(
boolean autoTriggerEnabled,
long creationGap,
long autoTriggerThreshold,
int retentionFileNum) {
this.autoTriggerEnabled = autoTriggerEnabled;
this.creationGap = creationGap;
this.autoTriggerThreshold = autoTriggerThreshold;
this.retentionFileNum = retentionFileNum;
}
public boolean isAutoTriggerEnabled() {
return autoTriggerEnabled;
}
public long getCreationGap() {
return creationGap;
}
public long getAutoTriggerThreshold() {
return autoTriggerThreshold;
}
public int getRetentionFileNum() {
return retentionFileNum;
}
public static Snapshot.Builder newBuilder() {
return new Snapshot.Builder();
}
public static class Builder {
private boolean autoTriggerEnabled = true;
private long creationGap = RaftServerConfigKeys.Snapshot.CREATION_GAP_DEFAULT;
private long autoTriggerThreshold =
RaftServerConfigKeys.Snapshot.AUTO_TRIGGER_THRESHOLD_DEFAULT;
private int retentionFileNum = RaftServerConfigKeys.Snapshot.RETENTION_FILE_NUM_DEFAULT;
public Snapshot build() {
return new Snapshot(
autoTriggerEnabled, creationGap, autoTriggerThreshold, retentionFileNum);
}
public Snapshot.Builder setAutoTriggerEnabled(boolean autoTriggerEnabled) {
this.autoTriggerEnabled = autoTriggerEnabled;
return this;
}
public Snapshot.Builder setCreationGap(long creationGap) {
this.creationGap = creationGap;
return this;
}
public Snapshot.Builder setAutoTriggerThreshold(long autoTriggerThreshold) {
this.autoTriggerThreshold = autoTriggerThreshold;
return this;
}
public Snapshot.Builder setRetentionFileNum(int retentionFileNum) {
this.retentionFileNum = retentionFileNum;
return this;
}
}
}
public static class ThreadPool {
private final boolean proxyCached;
private final int proxySize;
private final boolean serverCached;
private final int serverSize;
private final boolean clientCached;
private final int clientSize;
private ThreadPool(
boolean proxyCached,
int proxySize,
boolean serverCached,
int serverSize,
boolean clientCached,
int clientSize) {
this.proxyCached = proxyCached;
this.proxySize = proxySize;
this.serverCached = serverCached;
this.serverSize = serverSize;
this.clientCached = clientCached;
this.clientSize = clientSize;
}
public boolean isProxyCached() {
return proxyCached;
}
public int getProxySize() {
return proxySize;
}
public boolean isServerCached() {
return serverCached;
}
public int getServerSize() {
return serverSize;
}
public boolean isClientCached() {
return clientCached;
}
public int getClientSize() {
return clientSize;
}
public static ThreadPool.Builder newBuilder() {
return new ThreadPool.Builder();
}
public static class Builder {
private boolean proxyCached = RaftServerConfigKeys.ThreadPool.PROXY_CACHED_DEFAULT;
private int proxySize = RaftServerConfigKeys.ThreadPool.PROXY_SIZE_DEFAULT;
private boolean serverCached = RaftServerConfigKeys.ThreadPool.SERVER_CACHED_DEFAULT;
private int serverSize = RaftServerConfigKeys.ThreadPool.SERVER_SIZE_DEFAULT;
private boolean clientCached = RaftServerConfigKeys.ThreadPool.CLIENT_CACHED_DEFAULT;
private int clientSize = RaftServerConfigKeys.ThreadPool.CLIENT_SIZE_DEFAULT;
public ThreadPool build() {
return new ThreadPool(
proxyCached, proxySize, serverCached, serverSize, clientCached, clientSize);
}
public ThreadPool.Builder setProxyCached(boolean proxyCached) {
this.proxyCached = proxyCached;
return this;
}
public ThreadPool.Builder setProxySize(int proxySize) {
this.proxySize = proxySize;
return this;
}
public ThreadPool.Builder setServerCached(boolean serverCached) {
this.serverCached = serverCached;
return this;
}
public ThreadPool.Builder setServerSize(int serverSize) {
this.serverSize = serverSize;
return this;
}
public ThreadPool.Builder setClientCached(boolean clientCached) {
this.clientCached = clientCached;
return this;
}
public ThreadPool.Builder setClientSize(int clientSize) {
this.clientSize = clientSize;
return this;
}
}
}
public static class Log {
private final boolean useMemory;
private final int purgeGap;
private final boolean purgeUptoSnapshotIndex;
private final long preserveNumsWhenPurge;
private final SizeInBytes segmentSizeMax;
private final int segmentCacheNumMax;
private final SizeInBytes segmentCacheSizeMax;
private final SizeInBytes preallocatedSize;
private final int forceSyncNum;
private final boolean unsafeFlushEnabled;
private Log(
boolean useMemory,
int purgeGap,
boolean purgeUptoSnapshotIndex,
long preserveNumsWhenPurge,
SizeInBytes segmentSizeMax,
int segmentCacheNumMax,
SizeInBytes segmentCacheSizeMax,
SizeInBytes preallocatedSize,
int forceSyncNum,
boolean unsafeFlushEnabled) {
this.useMemory = useMemory;
this.purgeGap = purgeGap;
this.purgeUptoSnapshotIndex = purgeUptoSnapshotIndex;
this.preserveNumsWhenPurge = preserveNumsWhenPurge;
this.segmentSizeMax = segmentSizeMax;
this.segmentCacheNumMax = segmentCacheNumMax;
this.segmentCacheSizeMax = segmentCacheSizeMax;
this.preallocatedSize = preallocatedSize;
this.forceSyncNum = forceSyncNum;
this.unsafeFlushEnabled = unsafeFlushEnabled;
}
public boolean isUseMemory() {
return useMemory;
}
public int getPurgeGap() {
return purgeGap;
}
public boolean isPurgeUptoSnapshotIndex() {
return purgeUptoSnapshotIndex;
}
public SizeInBytes getSegmentSizeMax() {
return segmentSizeMax;
}
public int getSegmentCacheNumMax() {
return segmentCacheNumMax;
}
public SizeInBytes getSegmentCacheSizeMax() {
return segmentCacheSizeMax;
}
public SizeInBytes getPreallocatedSize() {
return preallocatedSize;
}
public int getForceSyncNum() {
return forceSyncNum;
}
public boolean isUnsafeFlushEnabled() {
return unsafeFlushEnabled;
}
public long getPreserveNumsWhenPurge() {
return preserveNumsWhenPurge;
}
public static Log.Builder newBuilder() {
return new Log.Builder();
}
public static class Builder {
private boolean useMemory = false;
private int purgeGap = 1024;
private boolean purgeUptoSnapshotIndex = true;
private long preserveNumsWhenPurge = 1000;
private SizeInBytes segmentSizeMax = SizeInBytes.valueOf("24MB");
private int segmentCacheNumMax = 2;
private SizeInBytes segmentCacheSizeMax = SizeInBytes.valueOf("200MB");
private SizeInBytes preallocatedSize = SizeInBytes.valueOf("4MB");
private int forceSyncNum = 128;
private boolean unsafeFlushEnabled = true;
public Log build() {
return new Log(
useMemory,
purgeGap,
purgeUptoSnapshotIndex,
preserveNumsWhenPurge,
segmentSizeMax,
segmentCacheNumMax,
segmentCacheSizeMax,
preallocatedSize,
forceSyncNum,
unsafeFlushEnabled);
}
public Log.Builder setUseMemory(boolean useMemory) {
this.useMemory = useMemory;
return this;
}
public Log.Builder setPurgeGap(int purgeGap) {
this.purgeGap = purgeGap;
return this;
}
public Log.Builder setPurgeUptoSnapshotIndex(boolean purgeUptoSnapshotIndex) {
this.purgeUptoSnapshotIndex = purgeUptoSnapshotIndex;
return this;
}
public Log.Builder setPreserveNumsWhenPurge(long preserveNumsWhenPurge) {
this.preserveNumsWhenPurge = preserveNumsWhenPurge;
return this;
}
public Log.Builder setSegmentSizeMax(SizeInBytes segmentSizeMax) {
this.segmentSizeMax = segmentSizeMax;
return this;
}
public Log.Builder setSegmentCacheNumMax(int segmentCacheNumMax) {
this.segmentCacheNumMax = segmentCacheNumMax;
return this;
}
public Log.Builder setSegmentCacheSizeMax(SizeInBytes segmentCacheSizeMax) {
this.segmentCacheSizeMax = segmentCacheSizeMax;
return this;
}
public Log.Builder setPreallocatedSize(SizeInBytes preallocatedSize) {
this.preallocatedSize = preallocatedSize;
return this;
}
public Log.Builder setForceSyncNum(int forceSyncNum) {
this.forceSyncNum = forceSyncNum;
return this;
}
public Log.Builder setUnsafeFlushEnabled(boolean unsafeFlushEnabled) {
this.unsafeFlushEnabled = unsafeFlushEnabled;
return this;
}
}
}
public static class Grpc {
private final SizeInBytes messageSizeMax;
private final SizeInBytes flowControlWindow;
private final boolean asyncRequestThreadPoolCached;
private final int asyncRequestThreadPoolSize;
private final int leaderOutstandingAppendsMax;
private final boolean isEnableSSL;
private final String sslTrustStorePath;
private final String sslTrustStorePassword;
private final String sslKeyStorePath;
private final String sslKeyStorePassword;
private Grpc(
SizeInBytes messageSizeMax,
SizeInBytes flowControlWindow,
boolean asyncRequestThreadPoolCached,
int asyncRequestThreadPoolSize,
int leaderOutstandingAppendsMax,
boolean isEnableSSL,
String sslTrustStorePath,
String sslTrustStorePassword,
String sslKeyStorePath,
String sslKeyStorePassword) {
this.messageSizeMax = messageSizeMax;
this.flowControlWindow = flowControlWindow;
this.asyncRequestThreadPoolCached = asyncRequestThreadPoolCached;
this.asyncRequestThreadPoolSize = asyncRequestThreadPoolSize;
this.leaderOutstandingAppendsMax = leaderOutstandingAppendsMax;
this.isEnableSSL = isEnableSSL;
this.sslTrustStorePath = sslTrustStorePath;
this.sslTrustStorePassword = sslTrustStorePassword;
this.sslKeyStorePath = sslKeyStorePath;
this.sslKeyStorePassword = sslKeyStorePassword;
}
public SizeInBytes getMessageSizeMax() {
return messageSizeMax;
}
public SizeInBytes getFlowControlWindow() {
return flowControlWindow;
}
public boolean isAsyncRequestThreadPoolCached() {
return asyncRequestThreadPoolCached;
}
public int getAsyncRequestThreadPoolSize() {
return asyncRequestThreadPoolSize;
}
public int getLeaderOutstandingAppendsMax() {
return leaderOutstandingAppendsMax;
}
public boolean isEnableSSL() {
return isEnableSSL;
}
public String getSslTrustStorePath() {
return sslTrustStorePath;
}
public String getSslTrustStorePassword() {
return sslTrustStorePassword;
}
public String getSslKeyStorePath() {
return sslKeyStorePath;
}
public String getSslKeyStorePassword() {
return sslKeyStorePassword;
}
public static Grpc.Builder newBuilder() {
return new Grpc.Builder();
}
public static class Builder {
private SizeInBytes messageSizeMax = SizeInBytes.valueOf("512MB");
private SizeInBytes flowControlWindow = SizeInBytes.valueOf("4MB");
private boolean asyncRequestThreadPoolCached =
Server.ASYNC_REQUEST_THREAD_POOL_CACHED_DEFAULT;
private int asyncRequestThreadPoolSize = Server.ASYNC_REQUEST_THREAD_POOL_SIZE_DEFAULT;
private int leaderOutstandingAppendsMax = Server.LEADER_OUTSTANDING_APPENDS_MAX_DEFAULT;
private boolean isEnableSSL = false;
private String sslTrustStorePath = "";
private String sslTrustStorePassword = "";
private String sslKeyStorePath = "";
private String sslKeyStorePassword = "";
public Grpc build() {
return new Grpc(
messageSizeMax,
flowControlWindow,
asyncRequestThreadPoolCached,
asyncRequestThreadPoolSize,
leaderOutstandingAppendsMax,
isEnableSSL,
sslTrustStorePath,
sslTrustStorePassword,
sslKeyStorePath,
sslKeyStorePassword);
}
public Grpc.Builder setMessageSizeMax(SizeInBytes messageSizeMax) {
this.messageSizeMax = messageSizeMax;
return this;
}
public Grpc.Builder setFlowControlWindow(SizeInBytes flowControlWindow) {
this.flowControlWindow = flowControlWindow;
return this;
}
public Grpc.Builder setAsyncRequestThreadPoolCached(boolean asyncRequestThreadPoolCached) {
this.asyncRequestThreadPoolCached = asyncRequestThreadPoolCached;
return this;
}
public Grpc.Builder setAsyncRequestThreadPoolSize(int asyncRequestThreadPoolSize) {
this.asyncRequestThreadPoolSize = asyncRequestThreadPoolSize;
return this;
}
public Grpc.Builder setLeaderOutstandingAppendsMax(int leaderOutstandingAppendsMax) {
this.leaderOutstandingAppendsMax = leaderOutstandingAppendsMax;
return this;
}
public Grpc.Builder setEnableSSL(boolean isEnableSSL) {
this.isEnableSSL = isEnableSSL;
return this;
}
public Grpc.Builder setSslTrustStorePath(String sslTrustStorePath) {
this.sslTrustStorePath = sslTrustStorePath;
return this;
}
public Grpc.Builder setSslTrustStorePassword(String sslTrustStorePassword) {
this.sslTrustStorePassword = sslTrustStorePassword;
return this;
}
public Grpc.Builder setSslKeyStorePath(String sslKeyStorePath) {
this.sslKeyStorePath = sslKeyStorePath;
return this;
}
public Grpc.Builder setSslKeyStorePassword(String sslKeyStorePassword) {
this.sslKeyStorePassword = sslKeyStorePassword;
return this;
}
}
}
  /**
   * Client request timeout and retry configuration.
   *
   * <p>NOTE(review): unlike the sibling config classes (e.g. Rpc, Snapshot), the constructor here
   * is {@code public}, so callers can bypass the builder. Kept as-is for backward compatibility.
   */
  public static class Client {
    private final long clientRequestTimeoutMillis;
    private final int clientMaxRetryAttempt;
    private final long clientRetryInitialSleepTimeMs;
    private final long clientRetryMaxSleepTimeMs;
    private final int maxClientNumForEachNode;
    public Client(
        long clientRequestTimeoutMillis,
        int clientMaxRetryAttempt,
        long clientRetryInitialSleepTimeMs,
        long clientRetryMaxSleepTimeMs,
        int maxClientNumForEachNode) {
      this.clientRequestTimeoutMillis = clientRequestTimeoutMillis;
      this.clientMaxRetryAttempt = clientMaxRetryAttempt;
      this.clientRetryInitialSleepTimeMs = clientRetryInitialSleepTimeMs;
      this.clientRetryMaxSleepTimeMs = clientRetryMaxSleepTimeMs;
      this.maxClientNumForEachNode = maxClientNumForEachNode;
    }
    /** @return the per-request timeout in milliseconds. */
    public long getClientRequestTimeoutMillis() {
      return clientRequestTimeoutMillis;
    }
    /** @return the maximum number of retry attempts. */
    public int getClientMaxRetryAttempt() {
      return clientMaxRetryAttempt;
    }
    /** @return the initial retry sleep in milliseconds. */
    public long getClientRetryInitialSleepTimeMs() {
      return clientRetryInitialSleepTimeMs;
    }
    /** @return the maximum retry sleep in milliseconds. */
    public long getClientRetryMaxSleepTimeMs() {
      return clientRetryMaxSleepTimeMs;
    }
    /** @return the maximum number of clients kept for each node. */
    public int getMaxClientNumForEachNode() {
      return maxClientNumForEachNode;
    }
    /** Creates a builder initialized with the defaults below. */
    public static Client.Builder newBuilder() {
      return new Builder();
    }
    /** Builder for {@link Client}. Defaults: 10 s timeout, 10 retries, 100 ms..10 s backoff. */
    public static class Builder {
      private long clientRequestTimeoutMillis = 10000;
      private int clientMaxRetryAttempt = 10;
      private long clientRetryInitialSleepTimeMs = 100;
      private long clientRetryMaxSleepTimeMs = 10000;
      private int maxClientNumForEachNode = DefaultProperty.MAX_CLIENT_NUM_FOR_EACH_NODE;
      public Client build() {
        return new Client(
            clientRequestTimeoutMillis,
            clientMaxRetryAttempt,
            clientRetryInitialSleepTimeMs,
            clientRetryMaxSleepTimeMs,
            maxClientNumForEachNode);
      }
      public Builder setClientRequestTimeoutMillis(long clientRequestTimeoutMillis) {
        this.clientRequestTimeoutMillis = clientRequestTimeoutMillis;
        return this;
      }
      public Builder setClientMaxRetryAttempt(int clientMaxRetryAttempt) {
        this.clientMaxRetryAttempt = clientMaxRetryAttempt;
        return this;
      }
      public Builder setClientRetryInitialSleepTimeMs(long clientRetryInitialSleepTimeMs) {
        this.clientRetryInitialSleepTimeMs = clientRetryInitialSleepTimeMs;
        return this;
      }
      public Builder setClientRetryMaxSleepTimeMs(long clientRetryMaxSleepTimeMs) {
        this.clientRetryMaxSleepTimeMs = clientRetryMaxSleepTimeMs;
        return this;
      }
      public Builder setMaxClientNumForEachNode(int maxClientNumForEachNode) {
        this.maxClientNumForEachNode = maxClientNumForEachNode;
        return this;
      }
    }
  }
  /**
   * Implementation-level retry and snapshot-scheduling configuration.
   *
   * <p>NOTE(review): like {@code Client} (and unlike the other sections), the constructor is
   * {@code public}; kept as-is for backward compatibility.
   */
  public static class Impl {
    private final int retryTimesMax;
    private final long retryWaitMillis;
    private final long retryMaxWaitMillis;
    private final long checkAndTakeSnapshotInterval;
    private final long raftLogSizeMaxThreshold;
    private final long forceSnapshotInterval;
    public Impl(
        int retryTimesMax,
        long retryWaitMillis,
        long retryMaxWaitMillis,
        long checkAndTakeSnapshotInterval,
        long raftLogSizeMaxThreshold,
        long forceSnapshotInterval) {
      this.retryTimesMax = retryTimesMax;
      this.retryWaitMillis = retryWaitMillis;
      this.retryMaxWaitMillis = retryMaxWaitMillis;
      this.checkAndTakeSnapshotInterval = checkAndTakeSnapshotInterval;
      this.raftLogSizeMaxThreshold = raftLogSizeMaxThreshold;
      this.forceSnapshotInterval = forceSnapshotInterval;
    }
    /** @return the maximum number of retries. */
    public int getRetryTimesMax() {
      return retryTimesMax;
    }
    /** @return the initial retry wait in milliseconds. */
    public long getRetryWaitMillis() {
      return retryWaitMillis;
    }
    /** @return the snapshot check interval (builder default is in seconds). */
    public long getCheckAndTakeSnapshotInterval() {
      return checkAndTakeSnapshotInterval;
    }
    /** @return the raft log size threshold in bytes. */
    public long getRaftLogSizeMaxThreshold() {
      return raftLogSizeMaxThreshold;
    }
    /** @return the forced snapshot interval; -1 means never force (per builder default). */
    public long getForceSnapshotInterval() {
      return forceSnapshotInterval;
    }
    /** @return the maximum retry wait in milliseconds. */
    public long getRetryMaxWaitMillis() {
      return retryMaxWaitMillis;
    }
    /** Creates a builder initialized with the defaults below. */
    public static Impl.Builder newBuilder() {
      return new Builder();
    }
    /** Builder for {@link Impl}. */
    public static class Builder {
      private int retryTimesMax = 10;
      private long retryWaitMillis = 100;
      private long retryMaxWaitMillis = 5000;
      // 120s
      private long checkAndTakeSnapshotInterval = 120;
      // 20GB
      private long raftLogSizeMaxThreshold = 20L << 30;
      // -1L means no force, measured in seconds
      private long forceSnapshotInterval = -1;
      public Impl build() {
        return new Impl(
            retryTimesMax,
            retryWaitMillis,
            retryMaxWaitMillis,
            checkAndTakeSnapshotInterval,
            raftLogSizeMaxThreshold,
            forceSnapshotInterval);
      }
      public Impl.Builder setRetryTimesMax(int retryTimesMax) {
        this.retryTimesMax = retryTimesMax;
        return this;
      }
      public Impl.Builder setRetryWaitMillis(long retryWaitMillis) {
        this.retryWaitMillis = retryWaitMillis;
        return this;
      }
      public Impl.Builder setCheckAndTakeSnapshotInterval(long checkAndTakeSnapshotInterval) {
        this.checkAndTakeSnapshotInterval = checkAndTakeSnapshotInterval;
        return this;
      }
      public Impl.Builder setRaftLogSizeMaxThreshold(long raftLogSizeMaxThreshold) {
        this.raftLogSizeMaxThreshold = raftLogSizeMaxThreshold;
        return this;
      }
      public Impl.Builder setForceSnapshotInterval(long forceSnapshotInterval) {
        this.forceSnapshotInterval = forceSnapshotInterval;
        return this;
      }
      // NOTE(review): parameter name differs from the field (retryMaxWaitTimeMillis vs
      // retryMaxWaitMillis); harmless, kept to preserve the source-visible signature.
      public Impl.Builder setRetryMaxWaitMillis(long retryMaxWaitTimeMillis) {
        this.retryMaxWaitMillis = retryMaxWaitTimeMillis;
        return this;
      }
    }
  }
public static class LeaderLogAppender {
private final SizeInBytes bufferByteLimit;
private final SizeInBytes snapshotChunkSizeMax;
private final boolean installSnapshotEnabled;
private LeaderLogAppender(
SizeInBytes bufferByteLimit,
SizeInBytes snapshotChunkSizeMax,
boolean installSnapshotEnabled) {
this.bufferByteLimit = bufferByteLimit;
this.snapshotChunkSizeMax = snapshotChunkSizeMax;
this.installSnapshotEnabled = installSnapshotEnabled;
}
public SizeInBytes getBufferByteLimit() {
return bufferByteLimit;
}
public SizeInBytes getSnapshotChunkSizeMax() {
return snapshotChunkSizeMax;
}
public boolean isInstallSnapshotEnabled() {
return installSnapshotEnabled;
}
public static LeaderLogAppender.Builder newBuilder() {
return new LeaderLogAppender.Builder();
}
public static class Builder {
private SizeInBytes bufferByteLimit =
RaftServerConfigKeys.Log.Appender.BUFFER_BYTE_LIMIT_DEFAULT;
private SizeInBytes snapshotChunkSizeMax =
RaftServerConfigKeys.Log.Appender.SNAPSHOT_CHUNK_SIZE_MAX_DEFAULT;
private boolean installSnapshotEnabled =
RaftServerConfigKeys.Log.Appender.INSTALL_SNAPSHOT_ENABLED_DEFAULT;
public LeaderLogAppender build() {
return new LeaderLogAppender(bufferByteLimit, snapshotChunkSizeMax, installSnapshotEnabled);
}
public LeaderLogAppender.Builder setBufferByteLimit(long bufferByteLimit) {
this.bufferByteLimit = SizeInBytes.valueOf(bufferByteLimit);
return this;
}
public LeaderLogAppender.Builder setSnapshotChunkSizeMax(long snapshotChunkSizeMax) {
this.snapshotChunkSizeMax = SizeInBytes.valueOf(snapshotChunkSizeMax);
return this;
}
public LeaderLogAppender.Builder setInstallSnapshotEnabled(boolean installSnapshotEnabled) {
this.installSnapshotEnabled = installSnapshotEnabled;
return this;
}
}
}
public static class Read {
public enum Option {
DEFAULT,
LINEARIZABLE
}
private final Read.Option readOption;
private final TimeDuration readTimeout;
private Read(Read.Option readOption, TimeDuration readTimeout) {
this.readOption = readOption;
this.readTimeout = readTimeout;
}
public Option getReadOption() {
return readOption;
}
public TimeDuration getReadTimeout() {
return readTimeout;
}
public static Read.Builder newBuilder() {
return new Read.Builder();
}
public static class Builder {
private Read.Option readOption = Option.DEFAULT;
private TimeDuration readTimeout = TimeDuration.valueOf(10, TimeUnit.SECONDS);
public Read.Builder setReadOption(Read.Option readOption) {
this.readOption = readOption;
return this;
}
public Read.Builder setReadTimeout(TimeDuration timeout) {
this.readTimeout = timeout;
return this;
}
public Read build() {
return new Read(readOption, readTimeout);
}
}
}
public static class Utils {
private final int sleepDeviationThresholdMs;
private final int closeThresholdMs;
private final int transferLeaderTimeoutMs;
private Utils(
int sleepDeviationThresholdMs, int closeThresholdMs, int transferLeaderTimeoutMs) {
this.sleepDeviationThresholdMs = sleepDeviationThresholdMs;
this.closeThresholdMs = closeThresholdMs;
this.transferLeaderTimeoutMs = transferLeaderTimeoutMs;
}
public int getSleepDeviationThresholdMs() {
return sleepDeviationThresholdMs;
}
public int getCloseThresholdMs() {
return closeThresholdMs;
}
public int getTransferLeaderTimeoutMs() {
return transferLeaderTimeoutMs;
}
public static Utils.Builder newBuilder() {
return new Utils.Builder();
}
public static class Builder {
private int sleepDeviationThresholdMs = 4 * 1000;
private int closeThresholdMs = Integer.MAX_VALUE;
private int transferLeaderTimeoutMs = 30 * 1000;
public Utils build() {
return new Utils(sleepDeviationThresholdMs, closeThresholdMs, transferLeaderTimeoutMs);
}
public Utils.Builder setSleepDeviationThresholdMs(int sleepDeviationThresholdMs) {
this.sleepDeviationThresholdMs = sleepDeviationThresholdMs;
return this;
}
public Utils.Builder setCloseThresholdMs(int closeThresholdMs) {
this.closeThresholdMs = closeThresholdMs;
return this;
}
public Utils.Builder setTransferLeaderTimeoutMs(int transferLeaderTimeoutMs) {
this.transferLeaderTimeoutMs = transferLeaderTimeoutMs;
return this;
}
}
}
}
|
googleapis/google-cloud-java | 35,995 | java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/ListControlsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Response for ListControls method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListControlsResponse}
*/
public final class ListControlsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ListControlsResponse)
ListControlsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListControlsResponse.newBuilder() to construct.
  /** Builder-based constructor; external code constructs instances via {@code newBuilder()}. */
  private ListControlsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  /** No-arg constructor for the default instance; initializes fields to empty defaults. */
  private ListControlsResponse() {
    controls_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to create fresh instances.
    return new ListControlsResponse();
  }
  /** Returns the message descriptor generated from {@code control_service.proto}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1.ControlServiceProto
        .internal_static_google_cloud_discoveryengine_v1_ListControlsResponse_descriptor;
  }
  /** Wires up the reflective field accessors used by the protobuf runtime for this type. */
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1.ControlServiceProto
        .internal_static_google_cloud_discoveryengine_v1_ListControlsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1.ListControlsResponse.class,
            com.google.cloud.discoveryengine.v1.ListControlsResponse.Builder.class);
  }
public static final int CONTROLS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1.Control> controls_;
/**
*
*
* <pre>
* All the Controls for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
*/
  @java.lang.Override
  public java.util.List<com.google.cloud.discoveryengine.v1.Control> getControlsList() {
    // Returns the internal list directly; callers should treat it as read-only.
    return controls_;
  }
/**
*
*
* <pre>
* All the Controls for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
*/
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.discoveryengine.v1.ControlOrBuilder>
      getControlsOrBuilderList() {
    // Same backing list as getControlsList(), widened to the OrBuilder view.
    return controls_;
  }
/**
*
*
* <pre>
* All the Controls for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
*/
  @java.lang.Override
  public int getControlsCount() {
    // Number of Control entries in this response page.
    return controls_.size();
  }
/**
*
*
* <pre>
* All the Controls for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
*/
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.Control getControls(int index) {
    // Direct index lookup; the underlying list throws IndexOutOfBoundsException when out of range.
    return controls_.get(index);
  }
/**
*
*
* <pre>
* All the Controls for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
*/
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ControlOrBuilder getControlsOrBuilder(int index) {
    // Same element as getControls(index), returned through the OrBuilder interface.
    return controls_.get(index);
  }
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Pagination token, if not returned indicates the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the wire-format ByteString: decode once and cache the
      // String back into the field so later calls take the fast path above.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Pagination token, if not returned indicates the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Field currently holds the decoded String: encode once and cache the
      // ByteString back into the field for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // memoizedIsInitialized: -1 = not yet computed, 1 = initialized, 0 = not initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes this message in field-number order: controls (1), next_page_token (2). */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < controls_.size(); i++) {
      output.writeMessage(1, controls_.get(i));
    }
    // Empty token is the proto3 default and is omitted from the wire format.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes (and memoizes in {@code memoizedSize}) the serialized byte size. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < controls_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, controls_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Field-wise equality over controls, next_page_token, and unknown fields. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.discoveryengine.v1.ListControlsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1.ListControlsResponse other =
        (com.google.cloud.discoveryengine.v1.ListControlsResponse) obj;
    if (!getControlsList().equals(other.getControlsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /** Hash over the same fields as {@link #equals}, memoized in {@code memoizedHashCode}. */
  @java.lang.Override
  public int hashCode() {
    // A computed hash of 0 is indistinguishable from "not yet computed" and is
    // recomputed on every call; harmless since the result is deterministic.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getControlsCount() > 0) {
      hash = (37 * hash) + CONTROLS_FIELD_NUMBER;
      hash = (53 * hash) + getControlsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  /** Parses a {@code ListControlsResponse} from a {@link java.nio.ByteBuffer}. */
  public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  /** Parses from a {@link java.nio.ByteBuffer} using the given extension registry. */
  public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  /** Parses a {@code ListControlsResponse} from a {@link com.google.protobuf.ByteString}. */
  public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListControlsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  // Builder factory methods. newBuilder()/toBuilder() avoid allocating a Builder
  // when this is the shared default instance with no fields to copy.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(
      com.google.cloud.discoveryengine.v1.ListControlsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Internal variant wired to a parent for builder-tree change notifications.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response for ListControls method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.discoveryengine.v1.ListControlsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ListControlsResponse)
      com.google.cloud.discoveryengine.v1.ListControlsResponseOrBuilder {
    // NOTE: generated by protoc (DO NOT EDIT). bitField0_ bit 0x00000001 tracks
    // whether controls_ is a private mutable list; bit 0x00000002 tracks whether
    // next_page_token has been explicitly set.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.discoveryengine.v1.ControlServiceProto
          .internal_static_google_cloud_discoveryengine_v1_ListControlsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.discoveryengine.v1.ControlServiceProto
          .internal_static_google_cloud_discoveryengine_v1_ListControlsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.discoveryengine.v1.ListControlsResponse.class,
              com.google.cloud.discoveryengine.v1.ListControlsResponse.Builder.class);
    }
    // Construct using com.google.cloud.discoveryengine.v1.ListControlsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to defaults, releasing or clearing the controls sub-builder.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (controlsBuilder_ == null) {
        controls_ = java.util.Collections.emptyList();
      } else {
        controls_ = null;
        controlsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.discoveryengine.v1.ControlServiceProto
          .internal_static_google_cloud_discoveryengine_v1_ListControlsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.ListControlsResponse getDefaultInstanceForType() {
      return com.google.cloud.discoveryengine.v1.ListControlsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.ListControlsResponse build() {
      com.google.cloud.discoveryengine.v1.ListControlsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.discoveryengine.v1.ListControlsResponse buildPartial() {
      com.google.cloud.discoveryengine.v1.ListControlsResponse result =
          new com.google.cloud.discoveryengine.v1.ListControlsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated controls field: either freezes the builder-local list
    // (making it unmodifiable and handing ownership to the message) or builds from
    // the sub-builder.
    private void buildPartialRepeatedFields(
        com.google.cloud.discoveryengine.v1.ListControlsResponse result) {
      if (controlsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          controls_ = java.util.Collections.unmodifiableList(controls_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.controls_ = controls_;
      } else {
        result.controls_ = controlsBuilder_.build();
      }
    }
    // Copies singular fields whose presence bits are set.
    private void buildPartial0(com.google.cloud.discoveryengine.v1.ListControlsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.discoveryengine.v1.ListControlsResponse) {
        return mergeFrom((com.google.cloud.discoveryengine.v1.ListControlsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: controls lists are concatenated (with an aliasing fast path
    // when this builder's list is still empty); a non-empty nextPageToken overwrites.
    public Builder mergeFrom(com.google.cloud.discoveryengine.v1.ListControlsResponse other) {
      if (other == com.google.cloud.discoveryengine.v1.ListControlsResponse.getDefaultInstance())
        return this;
      if (controlsBuilder_ == null) {
        if (!other.controls_.isEmpty()) {
          if (controls_.isEmpty()) {
            // Alias the other message's immutable list; mutable bit stays clear so a
            // later mutation copies first (see ensureControlsIsMutable).
            controls_ = other.controls_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureControlsIsMutable();
            controls_.addAll(other.controls_);
          }
          onChanged();
        }
      } else {
        if (!other.controls_.isEmpty()) {
          if (controlsBuilder_.isEmpty()) {
            // Empty sub-builder: drop it and alias the incoming list instead.
            controlsBuilder_.dispose();
            controlsBuilder_ = null;
            controls_ = other.controls_;
            bitField0_ = (bitField0_ & ~0x00000001);
            controlsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getControlsFieldBuilder()
                    : null;
          } else {
            controlsBuilder_.addAllMessages(other.controls_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop; tag 10 = field 1 (controls, length-delimited message),
    // tag 18 = field 2 (next_page_token, length-delimited string).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.discoveryengine.v1.Control m =
                    input.readMessage(
                        com.google.cloud.discoveryengine.v1.Control.parser(), extensionRegistry);
                if (controlsBuilder_ == null) {
                  ensureControlsIsMutable();
                  controls_.add(m);
                } else {
                  controlsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.util.List<com.google.cloud.discoveryengine.v1.Control> controls_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: copies controls_ into a private ArrayList the first time
    // it is mutated (bit 0x00000001 records that the list is privately owned).
    private void ensureControlsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        controls_ = new java.util.ArrayList<com.google.cloud.discoveryengine.v1.Control>(controls_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created; once non-null it owns the repeated field and controls_ is null.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.discoveryengine.v1.Control,
            com.google.cloud.discoveryengine.v1.Control.Builder,
            com.google.cloud.discoveryengine.v1.ControlOrBuilder>
        controlsBuilder_;
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public java.util.List<com.google.cloud.discoveryengine.v1.Control> getControlsList() {
      if (controlsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(controls_);
      } else {
        return controlsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public int getControlsCount() {
      if (controlsBuilder_ == null) {
        return controls_.size();
      } else {
        return controlsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public com.google.cloud.discoveryengine.v1.Control getControls(int index) {
      if (controlsBuilder_ == null) {
        return controls_.get(index);
      } else {
        return controlsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder setControls(int index, com.google.cloud.discoveryengine.v1.Control value) {
      if (controlsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureControlsIsMutable();
        controls_.set(index, value);
        onChanged();
      } else {
        controlsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder setControls(
        int index, com.google.cloud.discoveryengine.v1.Control.Builder builderForValue) {
      if (controlsBuilder_ == null) {
        ensureControlsIsMutable();
        controls_.set(index, builderForValue.build());
        onChanged();
      } else {
        controlsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder addControls(com.google.cloud.discoveryengine.v1.Control value) {
      if (controlsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureControlsIsMutable();
        controls_.add(value);
        onChanged();
      } else {
        controlsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder addControls(int index, com.google.cloud.discoveryengine.v1.Control value) {
      if (controlsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureControlsIsMutable();
        controls_.add(index, value);
        onChanged();
      } else {
        controlsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder addControls(
        com.google.cloud.discoveryengine.v1.Control.Builder builderForValue) {
      if (controlsBuilder_ == null) {
        ensureControlsIsMutable();
        controls_.add(builderForValue.build());
        onChanged();
      } else {
        controlsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder addControls(
        int index, com.google.cloud.discoveryengine.v1.Control.Builder builderForValue) {
      if (controlsBuilder_ == null) {
        ensureControlsIsMutable();
        controls_.add(index, builderForValue.build());
        onChanged();
      } else {
        controlsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder addAllControls(
        java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1.Control> values) {
      if (controlsBuilder_ == null) {
        ensureControlsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, controls_);
        onChanged();
      } else {
        controlsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder clearControls() {
      if (controlsBuilder_ == null) {
        controls_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        controlsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public Builder removeControls(int index) {
      if (controlsBuilder_ == null) {
        ensureControlsIsMutable();
        controls_.remove(index);
        onChanged();
      } else {
        controlsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public com.google.cloud.discoveryengine.v1.Control.Builder getControlsBuilder(int index) {
      return getControlsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public com.google.cloud.discoveryengine.v1.ControlOrBuilder getControlsOrBuilder(int index) {
      if (controlsBuilder_ == null) {
        return controls_.get(index);
      } else {
        return controlsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.discoveryengine.v1.ControlOrBuilder>
        getControlsOrBuilderList() {
      if (controlsBuilder_ != null) {
        return controlsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(controls_);
      }
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public com.google.cloud.discoveryengine.v1.Control.Builder addControlsBuilder() {
      return getControlsFieldBuilder()
          .addBuilder(com.google.cloud.discoveryengine.v1.Control.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public com.google.cloud.discoveryengine.v1.Control.Builder addControlsBuilder(int index) {
      return getControlsFieldBuilder()
          .addBuilder(index, com.google.cloud.discoveryengine.v1.Control.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the Controls for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1.Control controls = 1;</code>
     */
    public java.util.List<com.google.cloud.discoveryengine.v1.Control.Builder>
        getControlsBuilderList() {
      return getControlsFieldBuilder().getBuilderList();
    }
    // Lazily migrates ownership of the repeated field from the plain list to a
    // RepeatedFieldBuilderV3; after this call controls_ is null.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.discoveryengine.v1.Control,
            com.google.cloud.discoveryengine.v1.Control.Builder,
            com.google.cloud.discoveryengine.v1.ControlOrBuilder>
        getControlsFieldBuilder() {
      if (controlsBuilder_ == null) {
        controlsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.discoveryengine.v1.Control,
                com.google.cloud.discoveryengine.v1.Control.Builder,
                com.google.cloud.discoveryengine.v1.ControlOrBuilder>(
                controls_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        controls_ = null;
      }
      return controlsBuilder_;
    }
    // Holds either a String or a ByteString (lazy UTF-8 decode).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ListControlsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ListControlsResponse)
  // Singleton default instance: an empty message shared by all callers; also
  // serves as the prototype for newBuilder().
  private static final com.google.cloud.discoveryengine.v1.ListControlsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ListControlsResponse();
  }
  public static com.google.cloud.discoveryengine.v1.ListControlsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and attaches the partially
  // built message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListControlsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListControlsResponse>() {
        @java.lang.Override
        public ListControlsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors for the shared PARSER and default instance.
  public static com.google.protobuf.Parser<ListControlsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListControlsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ListControlsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/platform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Message describing annotations specific to application node.
* This message is a duplication of StreamWithAnnotation.NodeAnnotation.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ApplicationNodeAnnotation}
*/
public final class ApplicationNodeAnnotation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ApplicationNodeAnnotation)
ApplicationNodeAnnotationOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ApplicationNodeAnnotation.newBuilder() to construct.
  private ApplicationNodeAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance; initializes fields to
  // their proto3 defaults (empty string / empty list).
  private ApplicationNodeAnnotation() {
    node_ = "";
    annotations_ = java.util.Collections.emptyList();
  }
  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ApplicationNodeAnnotation();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.visionai.v1.PlatformProto
        .internal_static_google_cloud_visionai_v1_ApplicationNodeAnnotation_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.visionai.v1.PlatformProto
        .internal_static_google_cloud_visionai_v1_ApplicationNodeAnnotation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.visionai.v1.ApplicationNodeAnnotation.class,
            com.google.cloud.visionai.v1.ApplicationNodeAnnotation.Builder.class);
  }
  public static final int NODE_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily and cached on first
  // access (standard generated string-field representation).
  @SuppressWarnings("serial")
  private volatile java.lang.Object node_ = "";
  /**
   *
   *
   * <pre>
   * The node name of the application graph.
   * </pre>
   *
   * <code>string node = 1;</code>
   *
   * @return The node.
   */
  @java.lang.Override
  public java.lang.String getNode() {
    java.lang.Object ref = node_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      node_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The node name of the application graph.
   * </pre>
   *
   * <code>string node = 1;</code>
   *
   * @return The bytes for node.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNodeBytes() {
    java.lang.Object ref = node_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      node_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ANNOTATIONS_FIELD_NUMBER = 2;
  // Immutable once the message is built; safe to return directly from accessors.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.visionai.v1.StreamAnnotation> annotations_;
  /**
   *
   *
   * <pre>
   * The node specific stream annotations.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.visionai.v1.StreamAnnotation> getAnnotationsList() {
    return annotations_;
  }
  /**
   *
   *
   * <pre>
   * The node specific stream annotations.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.visionai.v1.StreamAnnotationOrBuilder>
      getAnnotationsOrBuilderList() {
    return annotations_;
  }
  /**
   *
   *
   * <pre>
   * The node specific stream annotations.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
   */
  @java.lang.Override
  public int getAnnotationsCount() {
    return annotations_.size();
  }
  /**
   *
   *
   * <pre>
   * The node specific stream annotations.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.StreamAnnotation getAnnotations(int index) {
    return annotations_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The node specific stream annotations.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.StreamAnnotationOrBuilder getAnnotationsOrBuilder(int index) {
    return annotations_.get(index);
  }
  // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order: node (1) only when non-empty
  // (proto3 default elision), every annotations element (2), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(node_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, node_);
    }
    for (int i = 0; i < annotations_.size(); i++) {
      output.writeMessage(2, annotations_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize; -1 = not computed) the byte size of
  // the serialized form, mirroring writeTo's field-by-field logic.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(node_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, node_);
    }
    for (int i = 0; i < annotations_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, annotations_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value-based equality over node, annotations, and unknown fields, per the
  // generated-message contract.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.visionai.v1.ApplicationNodeAnnotation)) {
      return super.equals(obj);
    }
    com.google.cloud.visionai.v1.ApplicationNodeAnnotation other =
        (com.google.cloud.visionai.v1.ApplicationNodeAnnotation) obj;
    if (!getNode().equals(other.getNode())) return false;
    if (!getAnnotationsList().equals(other.getAnnotationsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash (0 = not yet computed), consistent with equals() above; uses
  // protoc's standard 19/37/53 mixing constants.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NODE_FIELD_NUMBER;
    hash = (53 * hash) + getNode().hashCode();
    // Repeated field contributes only when non-empty.
    if (getAnnotationsCount() > 0) {
      hash = (37 * hash) + ANNOTATIONS_FIELD_NUMBER;
      hash = (53 * hash) + getAnnotationsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. Byte-oriented overloads delegate to
  // PARSER; stream overloads go through GeneratedMessageV3 helpers; "Delimited"
  // variants read a varint length prefix first.
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. newBuilder() starts from the default instance; toBuilder()
  // copies this message's current field values into a fresh Builder.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.visionai.v1.ApplicationNodeAnnotation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when this is the (all-default) singleton instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Message describing annotations specific to application node.
   * This message is a duplication of StreamWithAnnotation.NodeAnnotation.
   * </pre>
   *
   * Protobuf type {@code google.cloud.visionai.v1.ApplicationNodeAnnotation}
   */
  // NOTE: protoc-generated builder; regenerating the proto will overwrite manual edits.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ApplicationNodeAnnotation)
      com.google.cloud.visionai.v1.ApplicationNodeAnnotationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.visionai.v1.PlatformProto
          .internal_static_google_cloud_visionai_v1_ApplicationNodeAnnotation_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.visionai.v1.PlatformProto
          .internal_static_google_cloud_visionai_v1_ApplicationNodeAnnotation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.visionai.v1.ApplicationNodeAnnotation.class,
              com.google.cloud.visionai.v1.ApplicationNodeAnnotation.Builder.class);
    }
    // Construct using com.google.cloud.visionai.v1.ApplicationNodeAnnotation.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets the builder to the default (empty) message state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      node_ = "";
      if (annotationsBuilder_ == null) {
        annotations_ = java.util.Collections.emptyList();
      } else {
        annotations_ = null;
        annotationsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.visionai.v1.PlatformProto
          .internal_static_google_cloud_visionai_v1_ApplicationNodeAnnotation_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.visionai.v1.ApplicationNodeAnnotation getDefaultInstanceForType() {
      return com.google.cloud.visionai.v1.ApplicationNodeAnnotation.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.visionai.v1.ApplicationNodeAnnotation build() {
      com.google.cloud.visionai.v1.ApplicationNodeAnnotation result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds the message without checking required fields (proto3 has none here anyway).
    @java.lang.Override
    public com.google.cloud.visionai.v1.ApplicationNodeAnnotation buildPartial() {
      com.google.cloud.visionai.v1.ApplicationNodeAnnotation result =
          new com.google.cloud.visionai.v1.ApplicationNodeAnnotation(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.visionai.v1.ApplicationNodeAnnotation result) {
      if (annotationsBuilder_ == null) {
        // Freeze the locally-owned list before handing it to the immutable message.
        if (((bitField0_ & 0x00000002) != 0)) {
          annotations_ = java.util.Collections.unmodifiableList(annotations_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.annotations_ = annotations_;
      } else {
        result.annotations_ = annotationsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.visionai.v1.ApplicationNodeAnnotation result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.node_ = node_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.visionai.v1.ApplicationNodeAnnotation) {
        return mergeFrom((com.google.cloud.visionai.v1.ApplicationNodeAnnotation) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.visionai.v1.ApplicationNodeAnnotation other) {
      if (other == com.google.cloud.visionai.v1.ApplicationNodeAnnotation.getDefaultInstance())
        return this;
      if (!other.getNode().isEmpty()) {
        node_ = other.node_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (annotationsBuilder_ == null) {
        if (!other.annotations_.isEmpty()) {
          if (annotations_.isEmpty()) {
            // Share the other message's (immutable) list instead of copying it.
            annotations_ = other.annotations_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureAnnotationsIsMutable();
            annotations_.addAll(other.annotations_);
          }
          onChanged();
        }
      } else {
        if (!other.annotations_.isEmpty()) {
          if (annotationsBuilder_.isEmpty()) {
            annotationsBuilder_.dispose();
            annotationsBuilder_ = null;
            annotations_ = other.annotations_;
            bitField0_ = (bitField0_ & ~0x00000002);
            annotationsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAnnotationsFieldBuilder()
                    : null;
          } else {
            annotationsBuilder_.addAllMessages(other.annotations_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses fields from the wire: tag 10 = node (string), tag 18 = annotations (message).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                node_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.cloud.visionai.v1.StreamAnnotation m =
                    input.readMessage(
                        com.google.cloud.visionai.v1.StreamAnnotation.parser(), extensionRegistry);
                if (annotationsBuilder_ == null) {
                  ensureAnnotationsIsMutable();
                  annotations_.add(m);
                } else {
                  annotationsBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Field-presence / list-ownership bits:
    //   0x00000001 -> node has been set
    //   0x00000002 -> annotations_ list is privately owned (mutable) by this builder
    private int bitField0_;
    private java.lang.Object node_ = "";
    /**
     *
     *
     * <pre>
     * The node name of the application graph.
     * </pre>
     *
     * <code>string node = 1;</code>
     *
     * @return The node.
     */
    public java.lang.String getNode() {
      java.lang.Object ref = node_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode and cache the UTF-8 bytes as a String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        node_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The node name of the application graph.
     * </pre>
     *
     * <code>string node = 1;</code>
     *
     * @return The bytes for node.
     */
    public com.google.protobuf.ByteString getNodeBytes() {
      java.lang.Object ref = node_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        node_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The node name of the application graph.
     * </pre>
     *
     * <code>string node = 1;</code>
     *
     * @param value The node to set.
     * @return This builder for chaining.
     */
    public Builder setNode(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      node_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node name of the application graph.
     * </pre>
     *
     * <code>string node = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNode() {
      node_ = getDefaultInstance().getNode();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node name of the application graph.
     * </pre>
     *
     * <code>string node = 1;</code>
     *
     * @param value The bytes for node to set.
     * @return This builder for chaining.
     */
    public Builder setNodeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      node_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Backing list for `annotations`; mutable only while bit 0x00000002 of bitField0_ is set.
    private java.util.List<com.google.cloud.visionai.v1.StreamAnnotation> annotations_ =
        java.util.Collections.emptyList();
    private void ensureAnnotationsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        annotations_ =
            new java.util.ArrayList<com.google.cloud.visionai.v1.StreamAnnotation>(annotations_);
        bitField0_ |= 0x00000002;
      }
    }
    // Once created, the field builder (not annotations_) is the source of truth for the list.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.visionai.v1.StreamAnnotation,
            com.google.cloud.visionai.v1.StreamAnnotation.Builder,
            com.google.cloud.visionai.v1.StreamAnnotationOrBuilder>
        annotationsBuilder_;
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public java.util.List<com.google.cloud.visionai.v1.StreamAnnotation> getAnnotationsList() {
      if (annotationsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(annotations_);
      } else {
        return annotationsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public int getAnnotationsCount() {
      if (annotationsBuilder_ == null) {
        return annotations_.size();
      } else {
        return annotationsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public com.google.cloud.visionai.v1.StreamAnnotation getAnnotations(int index) {
      if (annotationsBuilder_ == null) {
        return annotations_.get(index);
      } else {
        return annotationsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder setAnnotations(int index, com.google.cloud.visionai.v1.StreamAnnotation value) {
      if (annotationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationsIsMutable();
        annotations_.set(index, value);
        onChanged();
      } else {
        annotationsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder setAnnotations(
        int index, com.google.cloud.visionai.v1.StreamAnnotation.Builder builderForValue) {
      if (annotationsBuilder_ == null) {
        ensureAnnotationsIsMutable();
        annotations_.set(index, builderForValue.build());
        onChanged();
      } else {
        annotationsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder addAnnotations(com.google.cloud.visionai.v1.StreamAnnotation value) {
      if (annotationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationsIsMutable();
        annotations_.add(value);
        onChanged();
      } else {
        annotationsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder addAnnotations(int index, com.google.cloud.visionai.v1.StreamAnnotation value) {
      if (annotationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationsIsMutable();
        annotations_.add(index, value);
        onChanged();
      } else {
        annotationsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder addAnnotations(
        com.google.cloud.visionai.v1.StreamAnnotation.Builder builderForValue) {
      if (annotationsBuilder_ == null) {
        ensureAnnotationsIsMutable();
        annotations_.add(builderForValue.build());
        onChanged();
      } else {
        annotationsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder addAnnotations(
        int index, com.google.cloud.visionai.v1.StreamAnnotation.Builder builderForValue) {
      if (annotationsBuilder_ == null) {
        ensureAnnotationsIsMutable();
        annotations_.add(index, builderForValue.build());
        onChanged();
      } else {
        annotationsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder addAllAnnotations(
        java.lang.Iterable<? extends com.google.cloud.visionai.v1.StreamAnnotation> values) {
      if (annotationsBuilder_ == null) {
        ensureAnnotationsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotations_);
        onChanged();
      } else {
        annotationsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder clearAnnotations() {
      if (annotationsBuilder_ == null) {
        annotations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        annotationsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public Builder removeAnnotations(int index) {
      if (annotationsBuilder_ == null) {
        ensureAnnotationsIsMutable();
        annotations_.remove(index);
        onChanged();
      } else {
        annotationsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public com.google.cloud.visionai.v1.StreamAnnotation.Builder getAnnotationsBuilder(int index) {
      return getAnnotationsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public com.google.cloud.visionai.v1.StreamAnnotationOrBuilder getAnnotationsOrBuilder(
        int index) {
      if (annotationsBuilder_ == null) {
        return annotations_.get(index);
      } else {
        return annotationsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public java.util.List<? extends com.google.cloud.visionai.v1.StreamAnnotationOrBuilder>
        getAnnotationsOrBuilderList() {
      if (annotationsBuilder_ != null) {
        return annotationsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(annotations_);
      }
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public com.google.cloud.visionai.v1.StreamAnnotation.Builder addAnnotationsBuilder() {
      return getAnnotationsFieldBuilder()
          .addBuilder(com.google.cloud.visionai.v1.StreamAnnotation.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public com.google.cloud.visionai.v1.StreamAnnotation.Builder addAnnotationsBuilder(int index) {
      return getAnnotationsFieldBuilder()
          .addBuilder(index, com.google.cloud.visionai.v1.StreamAnnotation.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The node specific stream annotations.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.StreamAnnotation annotations = 2;</code>
     */
    public java.util.List<com.google.cloud.visionai.v1.StreamAnnotation.Builder>
        getAnnotationsBuilderList() {
      return getAnnotationsFieldBuilder().getBuilderList();
    }
    // Lazily creates the field builder, transferring ownership of annotations_ to it.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.visionai.v1.StreamAnnotation,
            com.google.cloud.visionai.v1.StreamAnnotation.Builder,
            com.google.cloud.visionai.v1.StreamAnnotationOrBuilder>
        getAnnotationsFieldBuilder() {
      if (annotationsBuilder_ == null) {
        annotationsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.visionai.v1.StreamAnnotation,
                com.google.cloud.visionai.v1.StreamAnnotation.Builder,
                com.google.cloud.visionai.v1.StreamAnnotationOrBuilder>(
                annotations_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        annotations_ = null;
      }
      return annotationsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ApplicationNodeAnnotation)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ApplicationNodeAnnotation)
  // Singleton all-fields-unset instance shared by all callers.
  private static final com.google.cloud.visionai.v1.ApplicationNodeAnnotation DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ApplicationNodeAnnotation();
  }
  public static com.google.cloud.visionai.v1.ApplicationNodeAnnotation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation; any exception during parsing is rethrown as
  // InvalidProtocolBufferException carrying the partially-built message.
  private static final com.google.protobuf.Parser<ApplicationNodeAnnotation> PARSER =
      new com.google.protobuf.AbstractParser<ApplicationNodeAnnotation>() {
        @java.lang.Override
        public ApplicationNodeAnnotation parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ApplicationNodeAnnotation> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ApplicationNodeAnnotation> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance.
  @java.lang.Override
  public com.google.cloud.visionai.v1.ApplicationNodeAnnotation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/j2cl | 36,130 | transpiler/java/com/google/j2cl/transpiler/ast/TypeDeclaration.java | /*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.transpiler.ast;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import com.google.auto.value.AutoValue;
import com.google.auto.value.extension.memoized.Memoized;
import com.google.common.base.Joiner;
import com.google.common.base.Predicates;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Streams;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.j2cl.common.ThreadLocalInterner;
import com.google.j2cl.common.visitor.Processor;
import com.google.j2cl.common.visitor.Visitable;
import com.google.j2cl.transpiler.ast.TypeDescriptors.BootstrapType;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Stream;
import javax.annotation.Nullable;
/**
* A declaration-site reference to a type.
*
* <p>This class is mostly a bag of precomputed properties, and the details of how those properties
* are created live in several creation functions in JdtUtils and TypeDeclarations.
*
 * <p>A couple of properties are lazily calculated via the DescriptorFactory interface, since
* eagerly calculating them would lead to infinite loops of Descriptor creation.
*
* <p>Since these are all declaration-site references, when there are type variables they are always
* thought of as type parameters.
*/
@Visitable
@AutoValue
public abstract class TypeDeclaration
implements HasJsNameInfo, HasReadableDescription, HasAnnotations {
/** Kind of type declaration. */
public enum Kind {
CLASS,
ENUM,
INTERFACE
}
/** Source language a type was written in. */
public enum SourceLanguage {
JAVA,
KOTLIN
}
/** The origin of the class. */
public enum Origin {
SOURCE,
LAMBDA_ABSTRACT_ADAPTOR,
LAMBDA_IMPLEMENTOR
}
  // Suffix used when naming the synthesized overlay-implementation class.
  private static final String OVERLAY_IMPLEMENTATION_CLASS_SUFFIX = "Overlay";
  /**
   * References to some descriptors need to be deferred in some cases since it will cause infinite
   * loops.
   */
  public interface DescriptorFactory<T> {
    T get(TypeDeclaration typeDeclaration);
  }
@Override
public final boolean equals(Object o) {
return o instanceof TypeDeclaration other && getUniqueId().equals(other.getUniqueId());
}
  /** Returns whether this is an interface that declares at least one default method. */
  @Memoized
  public boolean declaresDefaultMethods() {
    return isInterface()
        && getDeclaredMethodDescriptors().stream().anyMatch(MethodDescriptor::isDefaultMethod);
  }
  /**
   * Returns the unqualified simple source name as written in the source, {@code null} if the class
   * does not have a name, i.e. it is an anonymous class or a synthetic class.
   */
  @Nullable
  public abstract String getOriginalSimpleSourceName();
  /**
   * Returns the unqualified name like "Inner".
   *
   * <p>Note: this is the simple name of the class and might or might not be the original simple
   * source name, e.g. for local classes it returns a synthetic name that makes them unique as inner
   * classes of their enclosing types; it also returns names for classes that don't have source
   * names, e.g. anonymous and synthetic classes.
   */
  @Memoized
  public String getSimpleSourceName() {
    return AstUtils.getSimpleSourceName(getClassComponents());
  }
  /** Returns the simple binary name like "Outer$Inner". Used for file naming purposes. */
  @Memoized
  public String getSimpleBinaryName() {
    return Joiner.on('$').join(getClassComponents());
  }
  /**
   * Returns the fully package qualified binary name like "com.google.common.Outer$Inner".
   *
   * <p>Used for generated class metadata (per JLS), file overview, file path, unique id calculation
   * and other similar scenarios.
   */
  @Memoized
  public String getQualifiedBinaryName() {
    return AstUtils.buildQualifiedName(getPackageName(), getSimpleBinaryName());
  }
  /**
   * Returns the mangled name of a type.
   *
   * <p>The mangled name of a type is a string that uniquely identifies the type and will become
   * part of the JavaScript method name to be able to differentiate method overloads.
   */
  @Memoized
  public String getMangledName() {
    // e.g. "com.google.Foo.Bar" -> "com_google_Foo_Bar".
    return getQualifiedSourceName().replace('.', '_');
  }
  /** Returns the globally unique qualified name by which this type should be defined/imported. */
  public String getModuleName() {
    return getQualifiedJsName();
  }
  /** Returns the type declaration for the module that needs to be required for this type. */
  @Memoized
  public TypeDeclaration getEnclosingModule() {
    String moduleRelativeJsName = getModuleRelativeJsName();
    // Non-native types, and native types with no inner qualification, are their own module root.
    if (!isNative() || !moduleRelativeJsName.contains(".")) {
      return this;
    }
    if (getEnclosingTypeDeclaration() != null && !hasCustomizedJsNamespace()) {
      // Make sure that if the enclosing module is a non native type, getEnclosing module returns
      // the normal Java TypeDeclaration instead of synthesizing a native one. This is important
      // because it guarantees that the type will be goog.required using the "$impl" module not the
      // header module which might cause dependency cycles.
      return getEnclosingTypeDeclaration().getEnclosingModule();
    }
    // Synthesize a module root from the first component of the module-relative name.
    String enclosingJsName = Iterables.get(Splitter.on('.').split(moduleRelativeJsName), 0);
    String enclosingJsNamespace = getJsNamespace();
    return TypeDescriptors.createNativeTypeDescriptor(enclosingJsNamespace, enclosingJsName)
        .getTypeDeclaration();
  }
/**
* Returns the qualifier for the type from the root of the module, {@code ""} if the type is the
* module root.
*/
@Memoized
public String getInnerTypeQualifier() {
String moduleRelativeJsName = getModuleRelativeJsName();
int dotIndex = moduleRelativeJsName.indexOf('.');
if (dotIndex == -1) {
return "";
}
return moduleRelativeJsName.substring(dotIndex + 1);
}
  /** Returns whether an explicit JS namespace was specified for this type. */
  public boolean hasCustomizedJsNamespace() {
    return getCustomizedJsNamespace() != null;
  }
public String getImplModuleName() {
return isNative() ? getModuleName() : getModuleName() + "$impl";
}
  /** Returns the declaration of the package this type belongs to. */
  public abstract PackageDeclaration getPackage();
  /** Returns the fully package qualified name like "com.google.common". */
  public String getPackageName() {
    return getPackage().getName();
  }
  public boolean isInSamePackage(TypeDeclaration other) {
    // NOTE(review): uses reference equality — assumes PackageDeclaration instances are
    // shared/interned per package; confirm against PackageDeclaration's creation path.
    return getPackage() == other.getPackage();
  }
  /**
   * Returns a list of Strings representing the current type's simple name and enclosing type simple
   * names. For example for "com.google.foo.Outer" the class components are ["Outer"] and for
   * "com.google.foo.Outer.Inner" the class components are ["Outer", "Inner"].
   */
  public abstract ImmutableList<String> getClassComponents();
  /** Returns new synthesized inner class components. */
  public ImmutableList<String> synthesizeInnerClassComponents(Object... parts) {
    // The synthetic component is "$" followed by the (non-null) parts joined with "$".
    return ImmutableList.<String>builder()
        .addAll(getClassComponents())
        .add("$" + Joiner.on("$").skipNulls().join(parts))
        .build();
  }
  /** Returns the enclosing type declaration, or {@code null} for a top-level type. */
  @Nullable
  public abstract TypeDeclaration getEnclosingTypeDeclaration();
/** Returns the topmost enclosing type declaration for this type. */
public TypeDeclaration getTopEnclosingDeclaration() {
TypeDeclaration enclosingTypeDeclaration = getEnclosingTypeDeclaration();
return enclosingTypeDeclaration == null
? this
: enclosingTypeDeclaration.getTopEnclosingDeclaration();
}
  /** Returns the enclosing method descriptor if the class is a local or an anonymous class. */
  @Nullable
  @Memoized
  public MethodDescriptor getEnclosingMethodDescriptor() {
    // Deferred via a Supplier to avoid eager descriptor-creation cycles.
    return getEnclosingMethodDescriptorFactory().get();
  }
  public abstract Origin getOrigin();
  abstract Supplier<MethodDescriptor> getEnclosingMethodDescriptorFactory();
  public abstract ImmutableList<TypeVariable> getTypeParameterDescriptors();
  public abstract Visibility getVisibility();
  public abstract Kind getKind();
  public abstract boolean isAnnotation();
  public abstract SourceLanguage getSourceLanguage();
  /** Returns whether the described type is a class. */
  public boolean isClass() {
    return getKind() == Kind.CLASS;
  }
  /** Returns whether the described type is an interface. */
  public boolean isInterface() {
    return getKind() == Kind.INTERFACE;
  }
  /** Returns whether the described type is an enum. */
  public boolean isEnum() {
    return getKind() == Kind.ENUM;
  }
  /** Returns whether the type was declared {@code abstract}. */
  public boolean isAbstract() {
    return getHasAbstractModifier();
  }
  public abstract boolean isFinal();
  // TODO(b/322906767): Remove when the bug is fixed.
  // Opt-in escape hatch read once at class load from a JVM system property.
  private static final boolean PRESERVE_EQUALS_FOR_JSTYPE_INTERFACE =
      "true"
          .equals(
              System.getProperty(
                  "com.google.j2cl.transpiler.backend.kotlin.preserveEqualsForJsTypeInterface"));
  /** Returns whether this functional interface is also considered functional in Kotlin. */
  public boolean isKtFunctionalInterface() {
    if (!isFunctionalInterface()) {
      return false;
    }
    if (getAllSuperTypesIncludingSelf().stream()
        .filter(TypeDeclaration::isInterface)
        // TODO(b/317299672): Remove JsType special casing since should preserve all of them for
        // migration purposes.
        .filter(t -> PRESERVE_EQUALS_FOR_JSTYPE_INTERFACE && t.isJsType())
        .flatMap(t -> t.getDeclaredMethodDescriptors().stream())
        .anyMatch(MethodDescriptor::isOrOverridesJavaLangObjectMethod)) {
      // If the interface has an explicit {@code java.lang.Object} method, it is not considered to
      // be functional in Kotlin.
      return false;
    }
    // Kotlin also rejects SAMs whose single method is a property or declares type parameters.
    MethodDescriptor methodDescriptor = getSingleAbstractMethodDescriptor();
    return !methodDescriptor.isKtProperty()
        && methodDescriptor.getTypeParameterTypeDescriptors().isEmpty();
  }
  /** Returns whether the described type is a functional interface (JLS 9.8). */
  public abstract boolean isFunctionalInterface();
  /** Gets a list of annotations present on the declaration. */
  @Override
  @Memoized
  public ImmutableList<Annotation> getAnnotations() {
    return getAnnotationsFactory().get();
  }
  /** Returns whether this is a class that implements a JsFunction interface. */
  @Memoized
  public boolean isJsFunctionImplementation() {
    return isClass()
        && getInterfaceTypeDescriptors().stream().anyMatch(TypeDescriptor::isJsFunctionInterface);
  }
  public abstract boolean isJsFunctionInterface();
  public abstract boolean isJsType();
  /**
   * Returns whether the described type is a nested type (i.e. it is defined inside the body of some
   * enclosing type) but is not a member type because its location in the body is not in the
   * declaration scope of the enclosing type. For example:
   *
   * <p><code> class Foo { void bar() { class Baz {} } } </code>
   *
   * <p>or
   *
   * <p><code> class Foo { void bar() { Comparable comparable = new Comparable() { ... } } } </code>
   */
  public abstract boolean isLocal();
  public abstract boolean isAnonymous();
  @Override
  public abstract boolean isNative();
  @Nullable
  public abstract JsEnumInfo getJsEnumInfo();
  /** Returns whether the type maps to a native Kotlin type. */
  public boolean isKtNative() {
    return getKtTypeInfo() != null;
  }
  @Nullable
  abstract KtTypeInfo getKtTypeInfo();
  /** Returns whether any supertype (including this type) lives in the protobuf runtime package. */
  public boolean isProtobuf() {
    return getAllSuperTypesIncludingSelf().stream()
        .map(TypeDeclaration::getPackageName)
        .anyMatch(it -> it.equals("com.google.protobuf"));
  }
  public boolean isJsEnum() {
    return getJsEnumInfo() != null;
  }
  /** Returns true if the class captures its enclosing instance. */
  public abstract boolean isCapturingEnclosingInstance();
  @Memoized
  public boolean isExtern() {
    return isNative() && hasExternNamespace();
  }
  /** Returns whether the simple JS name is the Closure wildcard type "*" or unknown type "?". */
  public boolean isStarOrUnknown() {
    return getSimpleJsName().equals("*") || getSimpleJsName().equals("?");
  }
  // Returns whether the namespace of this (native) type places it in externs.
  private boolean hasExternNamespace() {
    // A native type descriptor is an extern if its namespace is the global namespace or if
    // it inherited the namespace from its (enclosing) extern type.
    return JsUtils.isGlobal(getJsNamespace())
        || (!hasCustomizedJsNamespace()
            && getEnclosingTypeDeclaration() != null
            && getEnclosingTypeDeclaration().isExtern());
  }
  /** Returns whether this declaration declares any type parameters. */
  public boolean hasTypeParameters() {
    return !getTypeParameterDescriptors().isEmpty();
  }
  /** Returns whether this type implements any interface, directly or transitively. */
  public boolean implementsInterfaces() {
    return !getAllSuperInterfaces().isEmpty();
  }
@Memoized
public boolean extendsNativeClass() {
TypeDeclaration superType = getSuperTypeDeclaration();
if (superType == null) {
return false;
}
return superType.isNative() || superType.extendsNativeClass();
}
  /** Returns whether this type declares at least one JsConstructor. */
  public boolean hasJsConstructor() {
    return !getJsConstructorMethodDescriptors().isEmpty();
  }
  /** Returns whether this type's superclass has a JsConstructor. */
  public boolean isJsConstructorSubtype() {
    TypeDeclaration superType = getSuperTypeDeclaration();
    return superType != null && superType.hasJsConstructor();
  }
  /** Whether casts to this type are checked or not. */
  public boolean isNoopCast() {
    if (isNative() && isJsEnum() && !getJsEnumInfo().hasCustomValue()) {
      // Nothing is known about the underlying type of a native JsEnum that doesn't provide a
      // custom value, so the cast cannot be checked.
      return true;
    }
    return isNative() && isInterface();
  }
  /**
   * Returns the JavaScript name for this class. This is same as simple source name unless modified
   * by JsType.
   */
  @Override
  @Nullable
  public abstract String getSimpleJsName();
  /**
   * Returns the qualifier for the type from the root of the module including the module root.
   *
   * <p>For example in the following code:
   *
   * <pre>{@code
   * class Top {
   *   @JsType(isNative = true, namespace = "foo", name = "Top.Inner")
   *   class TopInner {
   *     @JsType(isNative = true)
   *     class InnerInner {}
   *   }
   * }
   *
   * }</pre>
   *
   * <p>The module relative JS names are, in order: Top, Top.Inner, Top.Inner.InnerInner.
   */
  @Memoized
  String getModuleRelativeJsName() {
    // Only native types without a customized namespace inherit their name qualifier from the
    // enclosing type; everything else is addressed by its simple JS name.
    if (!isNative() || hasCustomizedJsNamespace() || getEnclosingTypeDeclaration() == null) {
      return getSimpleJsName();
    }
    String enclosingModuleRelativeName = getEnclosingTypeDeclaration().getModuleRelativeJsName();
    // enclosingModuleRelativeName can only be empty if the type has TypeDescriptors.globalNamespace
    // as an enclosing type. This could only potentially happen in synthetic type descriptors.
    return AstUtils.buildQualifiedName(enclosingModuleRelativeName, getSimpleJsName());
  }
  @Override
  @Nullable
  @Memoized
  public String getJsNamespace() {
    if (hasCustomizedJsNamespace()) {
      return getCustomizedJsNamespace();
    }
    if (getEnclosingTypeDeclaration() == null) {
      // Top-level types default to their package's namespace.
      return getPackage().getJsNamespace();
    }
    if (isNative()) {
      return getEnclosingTypeDeclaration().getJsNamespace();
    }
    if (getEnclosingTypeDeclaration().isNative()) {
      // When there is a type nested within a native type, it's important not to generate a name
      // like "Array.1" (like would happen if the outer native type was claiming to be native
      // Array and the nested type was anonymous) since this is almost guaranteed to collide
      // with other people also creating nested classes within a native type that claims to be
      // native Array.
      return getEnclosingTypeDeclaration().getQualifiedSourceName();
    }
    // Use the parent qualified name.
    return getEnclosingTypeDeclaration().getQualifiedJsName();
  }
@Override
@Memoized
public String getQualifiedJsName() {
if (JsUtils.isGlobal(getJsNamespace())) {
return getModuleRelativeJsName();
}
return AstUtils.buildQualifiedName(getJsNamespace(), getModuleRelativeJsName());
}
  @Nullable
  abstract String getCustomizedJsNamespace();
  @Nullable
  public abstract String getObjectiveCNamePrefix();
  /** Returns whether this type was declared under {@code @NullMarked} scope. */
  public abstract boolean isNullMarked();
  /** Returns the declaration that holds the runtime class metadata for this type. */
  @Memoized
  public TypeDeclaration getMetadataTypeDeclaration() {
    if (isNative() || (isJsEnum() && AstUtils.isJsEnumBoxingSupported())) {
      // Native types and boxable JsEnums keep their metadata on the overlay class.
      return getOverlayImplementationTypeDeclaration();
    }
    if (isJsFunctionInterface()) {
      return BootstrapType.JAVA_SCRIPT_FUNCTION.getDeclaration();
    }
    return this;
  }
  @Nullable
  public abstract TypeDeclaration getOverlaidTypeDeclaration();
  /** Synthesizes the declaration of the overlay class that hosts this type's overlay members. */
  @Memoized
  public TypeDeclaration getOverlayImplementationTypeDeclaration() {
    return newBuilder()
        .setEnclosingTypeDeclaration(this)
        .setOverlaidTypeDeclaration(this)
        .setClassComponents(synthesizeInnerClassComponents(OVERLAY_IMPLEMENTATION_CLASS_SUFFIX))
        .setVisibility(Visibility.PUBLIC)
        .setKind(getKind())
        .build();
  }
  @Memoized
  public boolean hasOverlayImplementationType() {
    // TODO(b/116825224): this should just be
    // isJsEnum() || isNative() || isJsFunctionInterface() && declaresJsOverlayMembers.
    // but there are some synthetic type descriptors created by
    // TypeDescriptors.createNativeGlobalTypeDescriptor that are marked native and confuse the
    // rewriting of overlay references.
    return isJsEnum()
        || (isJsType() && isNative())
        || (isJsFunctionInterface() && declaresJsOverlayMembers());
  }
  // Returns whether any declared method or field is annotated with @JsOverlay.
  private boolean declaresJsOverlayMembers() {
    return getDeclaredMethodDescriptors().stream().anyMatch(MethodDescriptor::isJsOverlay)
        || getDeclaredFieldDescriptors().stream().anyMatch(FieldDescriptor::isJsOverlay);
  }
  /**
   * Returns a list of the type descriptors of interfaces that are explicitly implemented directly
   * on this type.
   */
  @Memoized
  public ImmutableList<DeclaredTypeDescriptor> getInterfaceTypeDescriptors() {
    return getInterfaceTypeDescriptorsFactory().get(this);
  }
  /**
   * Returns the depth of this type in the type hierarchy tree, including classes and interfaces.
   */
  @Memoized
  public int getTypeHierarchyDepth() {
    // Depth is 1 + the deepest depth among the superclass and directly implemented interfaces.
    return 1
        + Stream.concat(Stream.of(getSuperTypeDescriptor()), getInterfaceTypeDescriptors().stream())
            .filter(Predicates.notNull())
            .mapToInt(i -> i.getTypeDeclaration().getTypeHierarchyDepth())
            .max()
            .orElse(0);
  }
  /**
   * Returns the fully package qualified source name like "com.google.common.Outer.Inner". Used in
   * places where original name is useful (like aliasing, identifying the corresponding java type,
   * Debug/Error output, etc.
   */
  @Memoized
  public String getQualifiedSourceName() {
    return AstUtils.buildQualifiedName(
        Streams.concat(Stream.of(getPackageName()), getClassComponents().stream()));
  }
@Nullable
@Memoized
public String getKtNativeQualifiedName() {
KtTypeInfo ktTypeInfo = getKtTypeInfo();
return ktTypeInfo != null ? ktTypeInfo.getQualifiedName() : null;
}
@Nullable
@Memoized
public String getKtBridgeQualifiedName() {
KtTypeInfo ktTypeInfo = getKtTypeInfo();
return ktTypeInfo != null ? ktTypeInfo.getBridgeQualifiedName() : null;
}
@Nullable
@Memoized
public String getKtCompanionQualifiedName() {
KtTypeInfo ktTypeInfo = getKtTypeInfo();
return ktTypeInfo == null ? null : ktTypeInfo.getCompanionQualifiedName();
}
@Nullable
@Memoized
public String getObjectiveCName() {
return J2ktAstUtils.getObjectiveCName(this);
}
  /** Returns the descriptor of this type's superclass, or {@code null} if it has none. */
  @Memoized
  @Nullable
  public DeclaredTypeDescriptor getSuperTypeDescriptor() {
    return getSuperTypeDescriptorFactory().get(this);
  }
  /** Returns the declaration of this type's superclass, or {@code null} if it has none. */
  @Nullable
  public TypeDeclaration getSuperTypeDeclaration() {
    return getSuperTypeDescriptor() == null ? null : getSuperTypeDescriptor().getTypeDeclaration();
  }
  /** Returns whether any type parameter has a bound that refers back to itself (e.g. T extends Comparable&lt;T&gt;). */
  public final boolean hasRecursiveTypeBounds() {
    return getTypeParameterDescriptors().stream().anyMatch(TypeVariable::hasRecursiveDefinition);
  }
  /**
   * Returns the erasure type (see definition of erasure type at
   * http://help.eclipse.org/luna/index.jsp) with an empty type arguments list.
   */
  @Memoized
  public DeclaredTypeDescriptor toRawTypeDescriptor() {
    return toDescriptor(ImmutableList.of());
  }
  /**
   * Returns the usage site type descriptor corresponding to this declaration with the trivial
   * parameterization.
   */
  @Memoized
  public DeclaredTypeDescriptor toDescriptor() {
    return toDescriptor(getTypeParameterDescriptors());
  }
  /** Returns the usage site type descriptor with parameterization. */
  public DeclaredTypeDescriptor toDescriptor(
      Iterable<? extends TypeDescriptor> typeArgumentDescriptors) {
    return DeclaredTypeDescriptor.newBuilder()
        .setTypeDeclaration(this)
        .setTypeArgumentDescriptors(typeArgumentDescriptors)
        .setNullable(true)
        .build();
  }
  /** A unique string for a given type. Used for interning. */
  @Memoized
  public String getUniqueId() {
    return getQualifiedBinaryName();
  }
  @Override
  @Memoized
  public int hashCode() {
    // Consistent with interning: identity is defined by the unique id.
    return getUniqueId().hashCode();
  }
  /** Returns {@code true} if {@code this} is subtype of {@code that}. */
  public boolean isSubtypeOf(TypeDeclaration that) {
    // TODO(b/70951075): distinguish between Java isSubtypeOf and our target interpretation of
    // isSubtypeOf for optimization purposes in the context of jsinterop. Note that this method is
    // used assuming it provides Java semantics.
    return TypeDescriptors.isJavaLangObject(that.toDescriptor())
        || getAllSuperTypesIncludingSelf().contains(that);
  }
  /** Returns the transitive closure of supertypes (classes and interfaces) plus this type. */
  @Memoized
  public Set<TypeDeclaration> getAllSuperTypesIncludingSelf() {
    return toDescriptor().getAllSuperTypesIncludingSelf().stream()
        .map(DeclaredTypeDescriptor::getTypeDeclaration)
        .collect(toImmutableSet());
  }
  /** Returns all interfaces implemented by this type, directly or transitively. */
  @Memoized
  public Set<TypeDeclaration> getAllSuperInterfaces() {
    return getAllSuperTypesIncludingSelf().stream()
        .filter(Predicate.not(Predicate.isEqual(this)))
        .filter(TypeDeclaration::isInterface)
        .collect(toImmutableSet());
  }
  /**
   * The list of methods declared in the type. Note: this does not include synthetic methods
   * (like bridge methods) nor supertype methods that are not overridden in the type.
   */
  @Memoized
  public ImmutableList<MethodDescriptor> getDeclaredMethodDescriptors() {
    return getDeclaredMethodDescriptorsFactory().get(this);
  }
/** Returns the JsConstructor for this class if any. */
@Memoized
@Nullable
public List<MethodDescriptor> getJsConstructorMethodDescriptors() {
return getDeclaredMethodDescriptors().stream()
.filter(MethodDescriptor::isJsConstructor)
.collect(toImmutableList());
}
  /** Returns the single abstract method of a functional interface, or {@code null} if none. */
  @Memoized
  @Nullable
  public MethodDescriptor getSingleAbstractMethodDescriptor() {
    return getSingleAbstractMethodDescriptorFactory().get(this);
  }
  /**
   * The list of fields declared in the type. Note: this does not include synthetic fields
   * (like captures) nor supertype fields.
   */
  @Memoized
  public ImmutableList<FieldDescriptor> getDeclaredFieldDescriptors() {
    return getDeclaredFieldDescriptorsFactory().get(this);
  }
@Memoized
ImmutableMap<String, Literal> getOrdinalValueByEnumFieldName() {
ImmutableMap.Builder<String, Literal> immutableMapBuilder = ImmutableMap.builder();
int ordinal = 0;
for (FieldDescriptor fieldDescriptor : getDeclaredFieldDescriptors()) {
if (!fieldDescriptor.isEnumConstant()) {
continue;
}
immutableMapBuilder.put(fieldDescriptor.getName(), NumberLiteral.fromInt(ordinal++));
}
return immutableMapBuilder.buildOrThrow();
}
  /**
   * The list of member types declared directly in the type. Note: this does not include
   * supertype member types nor local/anonymous types.
   */
  @Memoized
  public ImmutableList<TypeDeclaration> getMemberTypeDeclarations() {
    return getMemberTypeDeclarationsFactory().get();
  }
  @Override
  public final String toString() {
    return getUniqueId();
  }
  /** Returns a description that is useful for error messages. */
  @Override
  public String getReadableDescription() {
    // TODO(rluble): Actually provide a real readable description.
    if (isAnonymous()) {
      if (getInterfaceTypeDescriptors().isEmpty()) {
        return "<anonymous> extends " + getSuperTypeDescriptor().getReadableDescription();
      } else {
        return "<anonymous> implements "
            + getInterfaceTypeDescriptors().get(0).getReadableDescription();
      }
    } else if (isLocal()) {
      // Strip the compiler-assigned "$<n>" disambiguation from local class names.
      return getSimpleSourceName().replaceFirst("\\$\\d+", "");
    }
    return getSimpleSourceName();
  }
  /* PRIVATE AUTO_VALUE PROPERTIES */
  abstract boolean getHasAbstractModifier();
  @Nullable
  abstract DescriptorFactory<ImmutableList<DeclaredTypeDescriptor>>
      getInterfaceTypeDescriptorsFactory();
  @Nullable
  abstract DescriptorFactory<DeclaredTypeDescriptor> getSuperTypeDescriptorFactory();
  @Nullable
  abstract DescriptorFactory<ImmutableList<MethodDescriptor>> getDeclaredMethodDescriptorsFactory();
  @Nullable
  abstract DescriptorFactory<MethodDescriptor> getSingleAbstractMethodDescriptorFactory();
  @Nullable
  abstract DescriptorFactory<ImmutableList<FieldDescriptor>> getDeclaredFieldDescriptorsFactory();
  @Nullable
  abstract Supplier<ImmutableList<TypeDeclaration>> getMemberTypeDeclarationsFactory();
  abstract Supplier<ImmutableList<Annotation>> getAnnotationsFactory();
  abstract Builder toBuilder();
  /** Returns a builder initialized with conservative defaults for every optional property. */
  public static Builder newBuilder() {
    return new AutoValue_TypeDeclaration.Builder()
        // Default values.
        .setVisibility(Visibility.PUBLIC)
        .setSourceLanguage(SourceLanguage.JAVA)
        .setOrigin(Origin.SOURCE)
        .setHasAbstractModifier(false)
        .setAnonymous(false)
        .setNative(false)
        .setAnnotation(false)
        .setCapturingEnclosingInstance(false)
        .setFinal(false)
        .setFunctionalInterface(false)
        .setAnnotationsFactory(ImmutableList::of)
        .setJsFunctionInterface(false)
        .setJsType(false)
        .setLocal(false)
        .setNullMarked(false)
        .setTypeParameterDescriptors(ImmutableList.of())
        .setDeclaredMethodDescriptorsFactory(() -> ImmutableList.of())
        .setSingleAbstractMethodDescriptorFactory(() -> null)
        .setDeclaredFieldDescriptorsFactory(() -> ImmutableList.of())
        .setMemberTypeDeclarationsFactory(() -> ImmutableList.of())
        .setInterfaceTypeDescriptorsFactory(() -> ImmutableList.of())
        .setEnclosingMethodDescriptorFactory(() -> null)
        .setSuperTypeDescriptorFactory(() -> null);
  }
  // TODO(b/340930928): This is a temporary hack since JsFunction is not supported in Wasm.
  private static final ThreadLocal<Boolean> ignoreJsFunctionAnnotations =
      ThreadLocal.withInitial(() -> false);
  /** Makes subsequently built declarations on this thread drop JsFunction annotations (Wasm). */
  public static void setIgnoreJsFunctionAnnotations() {
    ignoreJsFunctionAnnotations.set(true);
  }
  // TODO(b/181615162): This is a temporary hack which allows Wasm to treat JsEnums differently from
  // Closure.
  // In Wasm:
  //  - TODO(b/288145698): Native JsEnums are ignored (the annotation is removed on creation of
  //    TypeDeclaration)
  //  - The supertype of JsEnums is not modified (it is still Enum, not changed to Object).
  private static final ThreadLocal<Boolean> implementWasmJsEnumSemantics =
      ThreadLocal.withInitial(() -> false);
  /** Enables Wasm JsEnum semantics for declarations built on this thread. */
  public static void setImplementWasmJsEnumSemantics() {
    implementWasmJsEnumSemantics.set(true);
  }
  // Dispatches this node to the visitor framework.
  TypeDeclaration acceptInternal(Processor processor) {
    return Visitor_TypeDeclaration.visit(processor, this);
  }
  /** Builder for a TypeDeclaration. */
  @AutoValue.Builder
  public abstract static class Builder {
    public abstract Builder setAnonymous(boolean isAnonymous);
    public abstract Builder setClassComponents(String... classComponents);
    public abstract Builder setClassComponents(List<String> classComponents);
    public abstract Builder setEnclosingTypeDeclaration(TypeDeclaration enclosingTypeDeclaration);
    public abstract Builder setEnclosingMethodDescriptorFactory(
        Supplier<MethodDescriptor> enclosingMethodDescriptorFactory);
    public abstract Builder setOverlaidTypeDeclaration(TypeDeclaration typeDeclaration);
    public abstract Builder setHasAbstractModifier(boolean hasAbstractModifier);
    public abstract Builder setKind(Kind kind);
    public abstract Builder setAnnotation(boolean isAnnotation);
    public abstract Builder setSourceLanguage(SourceLanguage sourceLanguage);
    public abstract Builder setCapturingEnclosingInstance(boolean capturingEnclosingInstance);
    public abstract Builder setFinal(boolean isFinal);
    public abstract Builder setFunctionalInterface(boolean isFunctionalInterface);
    public abstract Builder setOrigin(Origin origin);
    public abstract Builder setAnnotationsFactory(
        Supplier<ImmutableList<Annotation>> annotationsFactory);
    public abstract Builder setJsFunctionInterface(boolean isJsFunctionInterface);
    public abstract Builder setJsType(boolean isJsType);
    public abstract Builder setJsEnumInfo(JsEnumInfo jsEnumInfo);
    public abstract Builder setLocal(boolean local);
    public abstract Builder setNative(boolean isNative);
    public abstract Builder setKtTypeInfo(KtTypeInfo ktTypeInfo);
    public abstract Builder setTypeParameterDescriptors(
        Iterable<TypeVariable> typeParameterDescriptors);
    public abstract Builder setVisibility(Visibility visibility);
    public abstract Builder setOriginalSimpleSourceName(String originalSimpleSourceName);
    public abstract Builder setPackage(PackageDeclaration packageDeclaration);
    // Convenience shortcut for top-level types; expanded into package + class components by
    // build(). Not an AutoValue property.
    private String qualifiedSourceName;
    @CanIgnoreReturnValue
    public Builder setQualifiedSourceName(String qualifiedSourceName) {
      this.qualifiedSourceName = qualifiedSourceName;
      return this;
    }
    public abstract Builder setSimpleJsName(String simpleJsName);
    public abstract Builder setCustomizedJsNamespace(String jsNamespace);
    public abstract Builder setObjectiveCNamePrefix(String objectiveCNamePrefix);
    public abstract Builder setNullMarked(boolean isNullMarked);
    public abstract Builder setInterfaceTypeDescriptorsFactory(
        DescriptorFactory<ImmutableList<DeclaredTypeDescriptor>> interfaceTypeDescriptorsFactory);
    public Builder setInterfaceTypeDescriptorsFactory(
        Supplier<ImmutableList<DeclaredTypeDescriptor>> interfaceTypeDescriptorsFactory) {
      return setInterfaceTypeDescriptorsFactory(
          typeDescriptor -> interfaceTypeDescriptorsFactory.get());
    }
    public abstract Builder setSuperTypeDescriptorFactory(
        DescriptorFactory<DeclaredTypeDescriptor> superTypeDescriptorFactory);
    public Builder setSuperTypeDescriptorFactory(
        Supplier<DeclaredTypeDescriptor> superTypeDescriptorFactory) {
      return setSuperTypeDescriptorFactory(typeDescriptor -> superTypeDescriptorFactory.get());
    }
    public abstract Builder setDeclaredMethodDescriptorsFactory(
        DescriptorFactory<ImmutableList<MethodDescriptor>> declaredMethodDescriptorsFactory);
    public Builder setDeclaredMethodDescriptorsFactory(
        Supplier<ImmutableList<MethodDescriptor>> declaredMethodDescriptorsFactory) {
      return setDeclaredMethodDescriptorsFactory(
          typeDescriptor -> declaredMethodDescriptorsFactory.get());
    }
    public abstract Builder setSingleAbstractMethodDescriptorFactory(
        DescriptorFactory<MethodDescriptor> singleDeclaredAbstractMethodDescriptorFactory);
    public Builder setSingleAbstractMethodDescriptorFactory(
        Supplier<MethodDescriptor> singleDeclaredAbstractMethodDescriptorFactory) {
      return setSingleAbstractMethodDescriptorFactory(
          typeDescriptor -> singleDeclaredAbstractMethodDescriptorFactory.get());
    }
    public abstract Builder setDeclaredFieldDescriptorsFactory(
        DescriptorFactory<ImmutableList<FieldDescriptor>> declaredFieldDescriptorsFactory);
    public Builder setDeclaredFieldDescriptorsFactory(
        Supplier<ImmutableList<FieldDescriptor>> declaredFieldDescriptorsFactory) {
      return setDeclaredFieldDescriptorsFactory(
          typeDescriptor -> declaredFieldDescriptorsFactory.get());
    }
    public abstract Builder setMemberTypeDeclarationsFactory(
        Supplier<ImmutableList<TypeDeclaration>> memberTypeDeclarationsFactory);
    // Builder accessors to aid construction.
    abstract Optional<ImmutableList<String>> getClassComponents();
    abstract Optional<String> getSimpleJsName();
    abstract Optional<PackageDeclaration> getPackage();
    abstract Optional<TypeDeclaration> getEnclosingTypeDeclaration();
    abstract Optional<JsEnumInfo> getJsEnumInfo();
    abstract boolean isJsFunctionInterface();
    abstract boolean isNative();
    abstract Kind getKind();
    abstract boolean isAnnotation();
    // Declarations are interned per thread so that identity comparison works within a compile.
    private static final ThreadLocalInterner<TypeDeclaration> interner =
        new ThreadLocalInterner<>();
    abstract TypeDeclaration autoBuild();
    public TypeDeclaration build() {
      // Apply the thread-local Wasm hacks before constructing (see setIgnoreJsFunctionAnnotations
      // and setImplementWasmJsEnumSemantics on the enclosing class).
      if (isJsFunctionInterface() && ignoreJsFunctionAnnotations.get()) {
        setJsFunctionInterface(false);
      }
      // TODO(b/181615162): Find a better way to expose different flavors of type models by backend.
      if (getKind() == Kind.ENUM && isNative() && implementWasmJsEnumSemantics.get()) {
        setJsEnumInfo(null);
        setNative(false);
      }
      if (qualifiedSourceName != null) {
        // Setting qualifiedSourceName is only allowed for top-level types and shouldn't be mixed
        // with other construction styles (like providing packages, class components, etc.).
        checkState(getEnclosingTypeDeclaration().isEmpty());
        checkState(getPackage().isEmpty());
        checkState(getClassComponents().isEmpty());
        int lastDot = qualifiedSourceName.lastIndexOf('.');
        setPackage(
            PackageDeclaration.newBuilder()
                .setName(lastDot == -1 ? "" : qualifiedSourceName.substring(0, lastDot))
                .build());
        setClassComponents(qualifiedSourceName.substring(lastDot + 1));
      }
      if (getPackage().isEmpty()) {
        // If no package is set, enclosing type is mandatory where we can get the package from.
        setPackage(getEnclosingTypeDeclaration().get().getPackage());
      }
      if (getSimpleJsName().isEmpty()) {
        setSimpleJsName(AstUtils.getSimpleSourceName(getClassComponents().get()));
      }
      checkState(!isAnnotation() || getKind() == Kind.INTERFACE);
      TypeDeclaration typeDeclaration = autoBuild();
      // Has to be an interface to be a functional interface.
      checkState(typeDeclaration.isInterface() || !typeDeclaration.isFunctionalInterface());
      checkState(
          typeDeclaration.getTypeParameterDescriptors().stream()
              .noneMatch(TypeVariable::isWildcardOrCapture));
      checkState(
          typeDeclaration.getTypeParameterDescriptors().stream()
              .map(TypeVariable::getNullabilityAnnotation)
              .allMatch(Predicate.isEqual(NullabilityAnnotation.NONE)));
      return interner.intern(typeDeclaration);
    }
    public static Builder from(TypeDeclaration typeDeclaration) {
      return typeDeclaration.toBuilder();
    }
  }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.realtimebidding.v1alpha;
/**
* Service definition for RealTimeBidding (v1alpha).
*
* <p>
* Allows external bidders to manage their RTB integration with Google. This includes managing bidder endpoints, QPS quotas, configuring what ad inventory to receive via pretargeting, submitting creatives for verification, and accessing creative metadata such as approval status.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://developers.google.com/authorized-buyers/apis/realtimebidding/reference/rest/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link RealTimeBiddingRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class RealTimeBidding extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  static {
    // Fail fast at class-load time if the google-api-client on the classpath is older than the
    // minimum this generated library was built against (1.31.1).
    com.google.api.client.util.Preconditions.checkState(
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
        com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1)),
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.31.1 of google-api-client to run version " +
        "1.32.1 of the Real-time Bidding API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }
  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://realtimebidding.googleapis.com/";
  /**
   * The default encoded mTLS root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.31
   */
  public static final String DEFAULT_MTLS_ROOT_URL = "https://realtimebidding.mtls.googleapis.com/";
  /**
   * The default encoded service path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";
  /**
   * The default encoded batch path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";
  /**
   * The default encoded base URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
  /**
   * Constructor.
   *
   * <p>
   * Use {@link Builder} if you need to specify any of the optional parameters.
   * </p>
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *        <li>Google App Engine:
   *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *        <li>Android: {@code newCompatibleTransport} from
   *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *        </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *        <li>Android Honeycomb or higher:
   *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public RealTimeBidding(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }
  /**
   * @param builder builder
   */
  RealTimeBidding(Builder builder) {
    super(builder);
  }
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    super.initialize(httpClientRequest);
  }
  /**
   * An accessor for creating requests from the Bidders collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code RealTimeBidding realtimebidding = new RealTimeBidding(...);}
   *   {@code RealTimeBidding.Bidders.List request = realtimebidding.bidders().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Bidders bidders() {
    return new Bidders();
  }
/**
* The "bidders" collection of methods.
*/
public class Bidders {
    /**
     * An accessor for creating requests from the BiddingFunctions collection.
     *
     * <p>The typical use is:</p>
     * <pre>
     *   {@code RealTimeBidding realtimebidding = new RealTimeBidding(...);}
     *   {@code RealTimeBidding.BiddingFunctions.List request = realtimebidding.biddingFunctions().list(parameters ...)}
     * </pre>
     *
     * @return the resource collection
     */
    public BiddingFunctions biddingFunctions() {
      return new BiddingFunctions();
    }
/**
* The "biddingFunctions" collection of methods.
*/
public class BiddingFunctions {
      /**
       * Activates an existing bidding function. An activated function is available for invocation for the
       * server-side TURTLEDOVE simulations.
       *
       * Create a request for the method "biddingFunctions.activate".
       *
       * This request holds the parameters needed by the realtimebidding server.  After setting any
       * optional parameters, call the {@link Activate#execute()} method to invoke the remote operation.
       *
       * @param name Required. The name of the bidding function to activate. Format:
       *        `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
       * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest}
       * @return the request
       */
      public Activate activate(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest content) throws java.io.IOException {
        Activate result = new Activate(name, content);
        initialize(result);
        return result;
      }
      public class Activate extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction> {

        private static final String REST_PATH = "v1alpha/{+name}:activate";

        private final java.util.regex.Pattern NAME_PATTERN =
            java.util.regex.Pattern.compile("^bidders/[^/]+/biddingFunctions/[^/]+$");

        /**
         * Activates an existing bidding function. An activated function is available for invocation for
         * the server-side TURTLEDOVE simulations.
         *
         * Create a request for the method "biddingFunctions.activate".
         *
         * This request holds the parameters needed by the realtimebidding server.  After setting any
         * optional parameters, call the {@link Activate#execute()} method to invoke the remote operation.
         * <p> {@link
         * Activate#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param name Required. The name of the bidding function to activate. Format:
         *        `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest}
         * @since 1.13
         */
        protected Activate(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest content) {
          super(RealTimeBidding.this, "POST", REST_PATH, content, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction.class);
          this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^bidders/[^/]+/biddingFunctions/[^/]+$");
          }
        }

        @Override
        public Activate set$Xgafv(java.lang.String $Xgafv) {
          return (Activate) super.set$Xgafv($Xgafv);
        }

        @Override
        public Activate setAccessToken(java.lang.String accessToken) {
          return (Activate) super.setAccessToken(accessToken);
        }

        @Override
        public Activate setAlt(java.lang.String alt) {
          return (Activate) super.setAlt(alt);
        }

        @Override
        public Activate setCallback(java.lang.String callback) {
          return (Activate) super.setCallback(callback);
        }

        @Override
        public Activate setFields(java.lang.String fields) {
          return (Activate) super.setFields(fields);
        }

        @Override
        public Activate setKey(java.lang.String key) {
          return (Activate) super.setKey(key);
        }

        @Override
        public Activate setOauthToken(java.lang.String oauthToken) {
          return (Activate) super.setOauthToken(oauthToken);
        }

        @Override
        public Activate setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (Activate) super.setPrettyPrint(prettyPrint);
        }

        @Override
        public Activate setQuotaUser(java.lang.String quotaUser) {
          return (Activate) super.setQuotaUser(quotaUser);
        }

        @Override
        public Activate setUploadType(java.lang.String uploadType) {
          return (Activate) super.setUploadType(uploadType);
        }

        @Override
        public Activate setUploadProtocol(java.lang.String uploadProtocol) {
          return (Activate) super.setUploadProtocol(uploadProtocol);
        }

        /**
         * Required. The name of the bidding function to activate. Format:
         * `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        @com.google.api.client.util.Key
        private java.lang.String name;

        /** Required. The name of the bidding function to activate. Format:
       `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        public java.lang.String getName() {
          return name;
        }

        /**
         * Required. The name of the bidding function to activate. Format:
         * `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        public Activate setName(java.lang.String name) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^bidders/[^/]+/biddingFunctions/[^/]+$");
          }
          this.name = name;
          return this;
        }

        @Override
        public Activate set(String parameterName, Object value) {
          return (Activate) super.set(parameterName, value);
        }
      }
/**
* Archives an existing bidding function. An archived function will not be available for function
* invocation for the server-side TURTLEDOVE simulations unless it is activated.
*
* Create a request for the method "biddingFunctions.archive".
*
* This request holds the parameters needed by the realtimebidding server. After setting any
* optional parameters, call the {@link Archive#execute()} method to invoke the remote operation.
*
* @param name Required. The name of the bidding function to archive. Format:
* `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
* @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest}
* @return the request
*/
public Archive archive(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest content) throws java.io.IOException {
Archive result = new Archive(name, content);
initialize(result);
return result;
}
public class Archive extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction> {
private static final String REST_PATH = "v1alpha/{+name}:archive";
private final java.util.regex.Pattern NAME_PATTERN =
java.util.regex.Pattern.compile("^bidders/[^/]+/biddingFunctions/[^/]+$");
/**
* Archives an existing bidding function. An archived function will not be available for function
* invocation for the server-side TURTLEDOVE simulations unless it is activated.
*
* Create a request for the method "biddingFunctions.archive".
*
     * This request holds the parameters needed by the realtimebidding server. After setting any
* optional parameters, call the {@link Archive#execute()} method to invoke the remote operation.
* <p> {@link
* Archive#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
* be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param name Required. The name of the bidding function to archive. Format:
* `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
* @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest}
* @since 1.13
*/
protected Archive(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest content) {
super(RealTimeBidding.this, "POST", REST_PATH, content, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction.class);
this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^bidders/[^/]+/biddingFunctions/[^/]+$");
}
}
@Override
public Archive set$Xgafv(java.lang.String $Xgafv) {
return (Archive) super.set$Xgafv($Xgafv);
}
@Override
public Archive setAccessToken(java.lang.String accessToken) {
return (Archive) super.setAccessToken(accessToken);
}
@Override
public Archive setAlt(java.lang.String alt) {
return (Archive) super.setAlt(alt);
}
@Override
public Archive setCallback(java.lang.String callback) {
return (Archive) super.setCallback(callback);
}
@Override
public Archive setFields(java.lang.String fields) {
return (Archive) super.setFields(fields);
}
@Override
public Archive setKey(java.lang.String key) {
return (Archive) super.setKey(key);
}
@Override
public Archive setOauthToken(java.lang.String oauthToken) {
return (Archive) super.setOauthToken(oauthToken);
}
@Override
public Archive setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Archive) super.setPrettyPrint(prettyPrint);
}
@Override
public Archive setQuotaUser(java.lang.String quotaUser) {
return (Archive) super.setQuotaUser(quotaUser);
}
@Override
public Archive setUploadType(java.lang.String uploadType) {
return (Archive) super.setUploadType(uploadType);
}
@Override
public Archive setUploadProtocol(java.lang.String uploadProtocol) {
return (Archive) super.setUploadProtocol(uploadProtocol);
}
/**
* Required. The name of the bidding function to archive. Format:
* `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
*/
@com.google.api.client.util.Key
private java.lang.String name;
/** Required. The name of the bidding function to archive. Format:
`bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
*/
public java.lang.String getName() {
return name;
}
/**
* Required. The name of the bidding function to archive. Format:
* `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
*/
public Archive setName(java.lang.String name) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^bidders/[^/]+/biddingFunctions/[^/]+$");
}
this.name = name;
return this;
}
@Override
public Archive set(String parameterName, Object value) {
return (Archive) super.set(parameterName, value);
}
}
/**
* Creates a new bidding function.
*
* Create a request for the method "biddingFunctions.create".
*
* This request holds the parameters needed by the realtimebidding server. After setting any
* optional parameters, call the {@link Create#execute()} method to invoke the remote operation.
*
* @param parent Required. The name of the bidder for which to create the bidding function. Format:
* `bidders/{bidderAccountId}`
* @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction}
* @return the request
*/
public Create create(java.lang.String parent, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction content) throws java.io.IOException {
Create result = new Create(parent, content);
initialize(result);
return result;
}
public class Create extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction> {
private static final String REST_PATH = "v1alpha/{+parent}/biddingFunctions";
private final java.util.regex.Pattern PARENT_PATTERN =
java.util.regex.Pattern.compile("^bidders/[^/]+$");
/**
* Creates a new bidding function.
*
* Create a request for the method "biddingFunctions.create".
*
     * This request holds the parameters needed by the realtimebidding server. After setting any
* optional parameters, call the {@link Create#execute()} method to invoke the remote operation.
* <p> {@link
* Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
* be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param parent Required. The name of the bidder for which to create the bidding function. Format:
* `bidders/{bidderAccountId}`
* @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction}
* @since 1.13
*/
protected Create(java.lang.String parent, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction content) {
super(RealTimeBidding.this, "POST", REST_PATH, content, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction.class);
this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
"Parameter parent must conform to the pattern " +
"^bidders/[^/]+$");
}
}
@Override
public Create set$Xgafv(java.lang.String $Xgafv) {
return (Create) super.set$Xgafv($Xgafv);
}
@Override
public Create setAccessToken(java.lang.String accessToken) {
return (Create) super.setAccessToken(accessToken);
}
@Override
public Create setAlt(java.lang.String alt) {
return (Create) super.setAlt(alt);
}
@Override
public Create setCallback(java.lang.String callback) {
return (Create) super.setCallback(callback);
}
@Override
public Create setFields(java.lang.String fields) {
return (Create) super.setFields(fields);
}
@Override
public Create setKey(java.lang.String key) {
return (Create) super.setKey(key);
}
@Override
public Create setOauthToken(java.lang.String oauthToken) {
return (Create) super.setOauthToken(oauthToken);
}
@Override
public Create setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Create) super.setPrettyPrint(prettyPrint);
}
@Override
public Create setQuotaUser(java.lang.String quotaUser) {
return (Create) super.setQuotaUser(quotaUser);
}
@Override
public Create setUploadType(java.lang.String uploadType) {
return (Create) super.setUploadType(uploadType);
}
@Override
public Create setUploadProtocol(java.lang.String uploadProtocol) {
return (Create) super.setUploadProtocol(uploadProtocol);
}
/**
* Required. The name of the bidder for which to create the bidding function. Format:
* `bidders/{bidderAccountId}`
*/
@com.google.api.client.util.Key
private java.lang.String parent;
/** Required. The name of the bidder for which to create the bidding function. Format:
`bidders/{bidderAccountId}`
*/
public java.lang.String getParent() {
return parent;
}
/**
* Required. The name of the bidder for which to create the bidding function. Format:
* `bidders/{bidderAccountId}`
*/
public Create setParent(java.lang.String parent) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
"Parameter parent must conform to the pattern " +
"^bidders/[^/]+$");
}
this.parent = parent;
return this;
}
@Override
public Create set(String parameterName, Object value) {
return (Create) super.set(parameterName, value);
}
}
/**
* Lists the bidding functions that a bidder currently has registered.
*
* Create a request for the method "biddingFunctions.list".
*
* This request holds the parameters needed by the realtimebidding server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation.
*
* @param parent Required. Name of the bidder whose bidding functions will be listed. Format:
* `bidders/{bidder_account_id}`
* @return the request
*/
public List list(java.lang.String parent) throws java.io.IOException {
List result = new List(parent);
initialize(result);
return result;
}
public class List extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.ListBiddingFunctionsResponse> {
private static final String REST_PATH = "v1alpha/{+parent}/biddingFunctions";
private final java.util.regex.Pattern PARENT_PATTERN =
java.util.regex.Pattern.compile("^bidders/[^/]+$");
/**
* Lists the bidding functions that a bidder currently has registered.
*
* Create a request for the method "biddingFunctions.list".
*
     * This request holds the parameters needed by the realtimebidding server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
* {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param parent Required. Name of the bidder whose bidding functions will be listed. Format:
* `bidders/{bidder_account_id}`
* @since 1.13
*/
protected List(java.lang.String parent) {
super(RealTimeBidding.this, "GET", REST_PATH, null, com.google.api.services.realtimebidding.v1alpha.model.ListBiddingFunctionsResponse.class);
this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
"Parameter parent must conform to the pattern " +
"^bidders/[^/]+$");
}
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public List set$Xgafv(java.lang.String $Xgafv) {
return (List) super.set$Xgafv($Xgafv);
}
@Override
public List setAccessToken(java.lang.String accessToken) {
return (List) super.setAccessToken(accessToken);
}
@Override
public List setAlt(java.lang.String alt) {
return (List) super.setAlt(alt);
}
@Override
public List setCallback(java.lang.String callback) {
return (List) super.setCallback(callback);
}
@Override
public List setFields(java.lang.String fields) {
return (List) super.setFields(fields);
}
@Override
public List setKey(java.lang.String key) {
return (List) super.setKey(key);
}
@Override
public List setOauthToken(java.lang.String oauthToken) {
return (List) super.setOauthToken(oauthToken);
}
@Override
public List setPrettyPrint(java.lang.Boolean prettyPrint) {
return (List) super.setPrettyPrint(prettyPrint);
}
@Override
public List setQuotaUser(java.lang.String quotaUser) {
return (List) super.setQuotaUser(quotaUser);
}
@Override
public List setUploadType(java.lang.String uploadType) {
return (List) super.setUploadType(uploadType);
}
@Override
public List setUploadProtocol(java.lang.String uploadProtocol) {
return (List) super.setUploadProtocol(uploadProtocol);
}
/**
* Required. Name of the bidder whose bidding functions will be listed. Format:
* `bidders/{bidder_account_id}`
*/
@com.google.api.client.util.Key
private java.lang.String parent;
/** Required. Name of the bidder whose bidding functions will be listed. Format:
`bidders/{bidder_account_id}`
*/
public java.lang.String getParent() {
return parent;
}
/**
* Required. Name of the bidder whose bidding functions will be listed. Format:
* `bidders/{bidder_account_id}`
*/
public List setParent(java.lang.String parent) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
"Parameter parent must conform to the pattern " +
"^bidders/[^/]+$");
}
this.parent = parent;
return this;
}
/** The maximum number of bidding functions to return. */
@com.google.api.client.util.Key
private java.lang.Integer pageSize;
/** The maximum number of bidding functions to return.
*/
public java.lang.Integer getPageSize() {
return pageSize;
}
/** The maximum number of bidding functions to return. */
public List setPageSize(java.lang.Integer pageSize) {
this.pageSize = pageSize;
return this;
}
/**
* A token identifying a page of results the server should return. This value is received
* from a previous `ListBiddingFunctions` call in
* ListBiddingFunctionsResponse.nextPageToken.
*/
@com.google.api.client.util.Key
private java.lang.String pageToken;
/** A token identifying a page of results the server should return. This value is received from a
previous `ListBiddingFunctions` call in ListBiddingFunctionsResponse.nextPageToken.
*/
public java.lang.String getPageToken() {
return pageToken;
}
/**
* A token identifying a page of results the server should return. This value is received
* from a previous `ListBiddingFunctions` call in
* ListBiddingFunctionsResponse.nextPageToken.
*/
public List setPageToken(java.lang.String pageToken) {
this.pageToken = pageToken;
return this;
}
@Override
public List set(String parameterName, Object value) {
return (List) super.set(parameterName, value);
}
}
}
}
/**
* Builder for {@link RealTimeBidding}.
*
* <p>
* Implementation is not thread-safe.
* </p>
*
* @since 1.3.0
*/
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
// If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
// If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
// Use the regular endpoint for all other cases.
String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
return DEFAULT_MTLS_ROOT_URL;
}
return DEFAULT_ROOT_URL;
}
/**
* Returns an instance of a new builder.
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
super(
transport,
jsonFactory,
Builder.chooseEndpoint(transport),
DEFAULT_SERVICE_PATH,
httpRequestInitializer,
false);
setBatchPath(DEFAULT_BATCH_PATH);
}
/** Builds a new instance of {@link RealTimeBidding}. */
@Override
public RealTimeBidding build() {
return new RealTimeBidding(this);
}
@Override
public Builder setRootUrl(String rootUrl) {
return (Builder) super.setRootUrl(rootUrl);
}
@Override
public Builder setServicePath(String servicePath) {
return (Builder) super.setServicePath(servicePath);
}
@Override
public Builder setBatchPath(String batchPath) {
return (Builder) super.setBatchPath(batchPath);
}
@Override
public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
}
@Override
public Builder setApplicationName(String applicationName) {
return (Builder) super.setApplicationName(applicationName);
}
@Override
public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
}
@Override
public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
}
@Override
public Builder setSuppressAllChecks(boolean suppressAllChecks) {
return (Builder) super.setSuppressAllChecks(suppressAllChecks);
}
/**
* Set the {@link RealTimeBiddingRequestInitializer}.
*
* @since 1.12
*/
public Builder setRealTimeBiddingRequestInitializer(
RealTimeBiddingRequestInitializer realtimebiddingRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(realtimebiddingRequestInitializer);
}
@Override
public Builder setGoogleClientRequestInitializer(
com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
}
}
}
|
googleapis/google-cloud-java | 35,993 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListIndexesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/index_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [IndexService.ListIndexes][google.cloud.aiplatform.v1.IndexService.ListIndexes].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListIndexesResponse}
*/
public final class ListIndexesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListIndexesResponse)
ListIndexesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListIndexesResponse.newBuilder() to construct.
private ListIndexesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListIndexesResponse() {
indexes_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListIndexesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.IndexServiceProto
.internal_static_google_cloud_aiplatform_v1_ListIndexesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.IndexServiceProto
.internal_static_google_cloud_aiplatform_v1_ListIndexesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListIndexesResponse.class,
com.google.cloud.aiplatform.v1.ListIndexesResponse.Builder.class);
}
public static final int INDEXES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.Index> indexes_;
/**
*
*
* <pre>
* List of indexes in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.Index> getIndexesList() {
return indexes_;
}
/**
*
*
* <pre>
* List of indexes in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.IndexOrBuilder>
getIndexesOrBuilderList() {
return indexes_;
}
/**
*
*
* <pre>
* List of indexes in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
*/
@java.lang.Override
public int getIndexesCount() {
return indexes_.size();
}
/**
*
*
* <pre>
* List of indexes in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.Index getIndexes(int index) {
return indexes_.get(index);
}
/**
*
*
* <pre>
* List of indexes in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.IndexOrBuilder getIndexesOrBuilder(int index) {
return indexes_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to
* [ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Already decoded (or set directly as a String): return as-is.
      return (java.lang.String) ref;
    } else {
      // Field still holds the wire-format ByteString; decode it as UTF-8 once
      // and cache the String back into the field so later calls skip decoding.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to
* [ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Field currently holds a String; encode it as UTF-8 and cache the
      // ByteString back into the field so later calls skip re-encoding.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      // Already a ByteString (e.g. straight from the wire): return as-is.
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated Index indexes.
    for (int i = 0; i < indexes_.size(); i++) {
      output.writeMessage(1, indexes_.get(i));
    }
    // Field 2: string next_page_token — omitted from the wire when empty,
    // per proto3 default-value semantics.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    // Preserve any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the result; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Field 1: repeated Index indexes.
    for (int i = 0; i < indexes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, indexes_.get(i));
    }
    // Field 2: next_page_token contributes nothing when empty (not serialized).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-ListIndexesResponse (including null) falls through to the superclass check.
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ListIndexesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ListIndexesResponse other =
        (com.google.cloud.aiplatform.v1.ListIndexesResponse) obj;
    // Field-by-field comparison; unknown fields must match as well.
    if (!getIndexesList().equals(other.getIndexesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode caches the result; 0 means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field is mixed in only when non-empty, so an empty list and an
    // absent list hash identically.
    if (getIndexesCount() > 0) {
      hash = (37 * hash) + INDEXES_FIELD_NUMBER;
      hash = (53 * hash) + getIndexesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListIndexesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListIndexesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * Response message for
   * [IndexService.ListIndexes][google.cloud.aiplatform.v1.IndexService.ListIndexes].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.ListIndexesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListIndexesResponse)
      com.google.cloud.aiplatform.v1.ListIndexesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListIndexesResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListIndexesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.ListIndexesResponse.class,
              com.google.cloud.aiplatform.v1.ListIndexesResponse.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1.ListIndexesResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default; also resets the repeated-field builder if one exists.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (indexesBuilder_ == null) {
        indexes_ = java.util.Collections.emptyList();
      } else {
        indexes_ = null;
        indexesBuilder_.clear();
      }
      // Bit 0x1 tracks whether indexes_ is a privately-owned mutable list.
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListIndexesResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListIndexesResponse getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.ListIndexesResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListIndexesResponse build() {
      com.google.cloud.aiplatform.v1.ListIndexesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListIndexesResponse buildPartial() {
      com.google.cloud.aiplatform.v1.ListIndexesResponse result =
          new com.google.cloud.aiplatform.v1.ListIndexesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated `indexes` field into the result, freezing the list
    // (or delegating to the nested builder) so the built message is immutable.
    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1.ListIndexesResponse result) {
      if (indexesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          indexes_ = java.util.Collections.unmodifiableList(indexes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.indexes_ = indexes_;
      } else {
        result.indexes_ = indexesBuilder_.build();
      }
    }

    // Transfers the scalar fields that have their presence bit set.
    private void buildPartial0(com.google.cloud.aiplatform.v1.ListIndexesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.ListIndexesResponse) {
        return mergeFrom((com.google.cloud.aiplatform.v1.ListIndexesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListIndexesResponse other) {
      if (other == com.google.cloud.aiplatform.v1.ListIndexesResponse.getDefaultInstance())
        return this;
      // Repeated field merge: when no nested builder exists we can alias the other
      // message's immutable list directly; otherwise append into the builder.
      if (indexesBuilder_ == null) {
        if (!other.indexes_.isEmpty()) {
          if (indexes_.isEmpty()) {
            indexes_ = other.indexes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureIndexesIsMutable();
            indexes_.addAll(other.indexes_);
          }
          onChanged();
        }
      } else {
        if (!other.indexes_.isEmpty()) {
          if (indexesBuilder_.isEmpty()) {
            indexesBuilder_.dispose();
            indexesBuilder_ = null;
            indexes_ = other.indexes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            indexesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getIndexesFieldBuilder()
                    : null;
          } else {
            indexesBuilder_.addAllMessages(other.indexes_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge: dispatches on each field tag until end-of-stream (tag 0)
    // or an end-group tag; unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.aiplatform.v1.Index m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1.Index.parser(), extensionRegistry);
                if (indexesBuilder_ == null) {
                  ensureIndexesIsMutable();
                  indexes_.add(m);
                } else {
                  indexesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.aiplatform.v1.Index> indexes_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: replaces a possibly-shared list with a private
    // ArrayList before the first mutation and records ownership in bit 0x1.
    private void ensureIndexesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        indexes_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Index>(indexes_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Index,
            com.google.cloud.aiplatform.v1.Index.Builder,
            com.google.cloud.aiplatform.v1.IndexOrBuilder>
        indexesBuilder_;

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Index> getIndexesList() {
      if (indexesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(indexes_);
      } else {
        return indexesBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public int getIndexesCount() {
      if (indexesBuilder_ == null) {
        return indexes_.size();
      } else {
        return indexesBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Index getIndexes(int index) {
      if (indexesBuilder_ == null) {
        return indexes_.get(index);
      } else {
        return indexesBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder setIndexes(int index, com.google.cloud.aiplatform.v1.Index value) {
      if (indexesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIndexesIsMutable();
        indexes_.set(index, value);
        onChanged();
      } else {
        indexesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder setIndexes(
        int index, com.google.cloud.aiplatform.v1.Index.Builder builderForValue) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.set(index, builderForValue.build());
        onChanged();
      } else {
        indexesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder addIndexes(com.google.cloud.aiplatform.v1.Index value) {
      if (indexesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIndexesIsMutable();
        indexes_.add(value);
        onChanged();
      } else {
        indexesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder addIndexes(int index, com.google.cloud.aiplatform.v1.Index value) {
      if (indexesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIndexesIsMutable();
        indexes_.add(index, value);
        onChanged();
      } else {
        indexesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder addIndexes(com.google.cloud.aiplatform.v1.Index.Builder builderForValue) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.add(builderForValue.build());
        onChanged();
      } else {
        indexesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder addIndexes(
        int index, com.google.cloud.aiplatform.v1.Index.Builder builderForValue) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.add(index, builderForValue.build());
        onChanged();
      } else {
        indexesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder addAllIndexes(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Index> values) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, indexes_);
        onChanged();
      } else {
        indexesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder clearIndexes() {
      if (indexesBuilder_ == null) {
        indexes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        indexesBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public Builder removeIndexes(int index) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.remove(index);
        onChanged();
      } else {
        indexesBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Index.Builder getIndexesBuilder(int index) {
      return getIndexesFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.IndexOrBuilder getIndexesOrBuilder(int index) {
      if (indexesBuilder_ == null) {
        return indexes_.get(index);
      } else {
        return indexesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1.IndexOrBuilder>
        getIndexesOrBuilderList() {
      if (indexesBuilder_ != null) {
        return indexesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(indexes_);
      }
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Index.Builder addIndexesBuilder() {
      return getIndexesFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.Index.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Index.Builder addIndexesBuilder(int index) {
      return getIndexesFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.Index.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * List of indexes in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Index indexes = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Index.Builder> getIndexesBuilderList() {
      return getIndexesFieldBuilder().getBuilderList();
    }

    // Lazily creates the nested field builder; ownership of indexes_ transfers
    // to the builder, after which the plain list is no longer used.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Index,
            com.google.cloud.aiplatform.v1.Index.Builder,
            com.google.cloud.aiplatform.v1.IndexOrBuilder>
        getIndexesFieldBuilder() {
      if (indexesBuilder_ == null) {
        indexesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Index,
                com.google.cloud.aiplatform.v1.Index.Builder,
                com.google.cloud.aiplatform.v1.IndexOrBuilder>(
                indexes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        indexes_ = null;
      }
      return indexesBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve next page of results.
     * Pass to
     * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListIndexesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListIndexesResponse)
  // Process-wide singleton returned by getDefaultInstance(); all fields at defaults.
  private static final com.google.cloud.aiplatform.v1.ListIndexesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListIndexesResponse();
  }

  public static com.google.cloud.aiplatform.v1.ListIndexesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser instance; on parse failure the partially-built message is
  // attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListIndexesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListIndexesResponse>() {
        @java.lang.Override
        public ListIndexesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListIndexesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListIndexesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ListIndexesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/memory_bank_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for
* [MemoryBankService.ListMemories][google.cloud.aiplatform.v1beta1.MemoryBankService.ListMemories].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListMemoriesRequest}
*/
public final class ListMemoriesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListMemoriesRequest)
ListMemoriesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListMemoriesRequest.newBuilder() to construct.
  // Builder-driven constructor; invoked only via Builder.build()/buildPartial().
  private ListMemoriesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default constructor used for the singleton default instance; string fields
  // start as empty strings, page_size as 0.
  private ListMemoriesRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListMemoriesRequest();
  }
  // Descriptor plumbing linking this class to the generated proto metadata.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.MemoryBankServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.MemoryBankServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest.class,
            com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily converted and cached on access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The resource name of the ReasoningEngine to list the Memories
   * under. Format:
   * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access after parsing: decode the ByteString once and cache it.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The resource name of the ReasoningEngine to list the Memories
   * under. Format:
   * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; lazily converted and cached on access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";

  /**
   *
   *
   * <pre>
   * Optional. The standard list filter.
   * More detail in [AIP-160](https://google.aip.dev/160).
   *
   * Supported fields (equality match only):
   * * `scope` (as a JSON string)
   * </pre>
   *
   * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. The standard list filter.
   * More detail in [AIP-160](https://google.aip.dev/160).
   *
   * Supported fields (equality match only):
   * * `scope` (as a JSON string)
   * </pre>
   *
   * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 3;

  // Proto3 int32; 0 means "unset" and is not serialized on the wire.
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. The standard list page size.
   * </pre>
   *
   * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 4;

  // Holds either a String or a ByteString; lazily converted and cached on access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. The standard list page token.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. The standard list page token.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // Serializes set fields in field-number order; proto3 default values
  // (empty string, 0) are skipped entirely on the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and caches in memoizedSize) the wire size, mirroring the field
  // skipping logic in writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest other =
(com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Hash folds each field number and value; cached in memoizedHashCode.
  // Consistent with equals: equal messages yield equal hashes.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points: one overload per input source (ByteBuffer,
  // ByteString, byte[], InputStream, CodedInputStream), each with and without
  // an extension registry. All delegate to the shared PARSER.
  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message body.
  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Message-contract entry point; all builder construction funnels through newBuilder().
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Returns a fresh builder seeded from the singleton default instance (all fields unset).
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with every field of {@code prototype}.
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The shared default instance maps to an empty builder; any other instance
  // seeds the new builder with its current field values.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  // Parent-aware builder used by the runtime's nested-message plumbing.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [MemoryBankService.ListMemories][google.cloud.aiplatform.v1beta1.MemoryBankService.ListMemories].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListMemoriesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListMemoriesRequest)
com.google.cloud.aiplatform.v1beta1.ListMemoriesRequestOrBuilder {
    /** Returns the protobuf descriptor for {@code ListMemoriesRequest}. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.MemoryBankServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_descriptor;
    }

    /** Wires the reflective field accessors to the message and builder classes. */
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.MemoryBankServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest.class,
              com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest.newBuilder()
    /** Detached builder; external code should use {@code ListMemoriesRequest.newBuilder()}. */
    private Builder() {}

    /** Builder that reports field changes to the given parent message. */
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets every field to its proto3 default and clears all presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }
    /** Returns the descriptor of the message type this builder produces. */
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.MemoryBankServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_descriptor;
    }

    /** Returns the immutable default instance of {@code ListMemoriesRequest}. */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest.getDefaultInstance();
    }
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest build() {
com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    /** Builds the message from the builder's current state without the build()-time check. */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest buildPartial() {
      com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest result =
          new com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest(this);
      // Only copy fields at all if at least one presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    /** Copies each field whose presence bit is set from this builder into {@code result}. */
    private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
    /** Returns an independent copy of this builder with the same field values. */
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    // The overrides below delegate unchanged to the reflective, descriptor-driven
    // implementations in the generated base class.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges all non-default fields of {@code other} into this builder, following proto3
     * scalar semantics: a field is copied only when it holds a non-default value.
     */
    public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest other) {
      if (other == com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      // Unknown fields from the source message are preserved.
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    /** This message type has no required fields, so a builder is always initialized. */
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /**
     * Parses fields from the wire format directly into this builder. Unrecognized fields are
     * retained in the unknown-field set; on malformed input, the wrapped IOException is
     * rethrown while keeping everything parsed so far.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Tag encodes (field number << 3) | wire type; 0 means end of stream.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the ReasoningEngine to list the Memories
* under. Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the ReasoningEngine to list the Memories
* under. Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the ReasoningEngine to list the Memories
* under. Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the ReasoningEngine to list the Memories
* under. Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the ReasoningEngine to list the Memories
* under. Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
*
* Supported fields (equality match only):
* * `scope` (as a JSON string)
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
*
* Supported fields (equality match only):
* * `scope` (as a JSON string)
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
*
* Supported fields (equality match only):
* * `scope` (as a JSON string)
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
*
* Supported fields (equality match only):
* * `scope` (as a JSON string)
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
*
* Supported fields (equality match only):
* * `scope` (as a JSON string)
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. The standard list page size.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. The standard list page size.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list page size.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
    /** Replaces the unknown-field set verbatim (delegates to the generated base class). */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    /** Merges the given unknown fields into the existing unknown-field set. */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListMemoriesRequest)
}
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListMemoriesRequest)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest();
  }

  /** Returns the singleton all-fields-default instance of this message type. */
  public static com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless shared parser. parsePartialFrom converts every failure into an
  // InvalidProtocolBufferException carrying the partially populated message.
  private static final com.google.protobuf.Parser<ListMemoriesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListMemoriesRequest>() {
        @java.lang.Override
        public ListMemoriesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared stream parser for this message type. */
  public static com.google.protobuf.Parser<ListMemoriesRequest> parser() {
    return PARSER;
  }

  /** Returns the shared parser; required by the {@code Message} interface. */
  @java.lang.Override
  public com.google.protobuf.Parser<ListMemoriesRequest> getParserForType() {
    return PARSER;
  }

  /** Returns the singleton default instance. */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.ListMemoriesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/kafka | 36,484 | clients/src/test/java/org/apache/kafka/common/security/ssl/CommonNameLoggingTrustManagerFactoryWrapperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.ssl;
import org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper.CommonNameLoggingTrustManager;
import org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper.NeverExpiringX509Certificate;
import org.apache.kafka.common.utils.LogCaptureAppender;
import org.apache.kafka.test.TestSslUtils;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestInstance.Lifecycle;
import java.nio.ByteBuffer;
import java.security.KeyPair;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.SignatureException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Date;
import java.util.List;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
@TestInstance(Lifecycle.PER_CLASS)
public class CommonNameLoggingTrustManagerFactoryWrapperTest {
    // Certificate chains ordered leaf-first: [0] = end (leaf) certificate,
    // [1] = intermediate CA, [2] = root CA. Built once in setUpOnce().
    private X509Certificate[] chainWithValidEndCertificate;
    // Chain whose end certificate has already expired (but is correctly signed).
    private X509Certificate[] chainWithExpiredEndCertificate;
    // Chain whose end certificate is not signed by its intermediate CA.
    private X509Certificate[] chainWithInvalidEndCertificate;
    private X509Certificate[] chainWithMultipleEndCertificates;
    private X509Certificate[] chainWithValidAndInvalidEndCertificates;
    /**
     * Builds the certificate chains shared by all tests in this class (runs once because of
     * {@code Lifecycle.PER_CLASS}). The four boolean arguments select the chain variant;
     * NOTE(review): their exact meanings are defined by generateKeyChainIncludingCA
     * elsewhere in this class and are not visible here — confirm before relying on them.
     */
    @BeforeAll
    public void setUpOnce() throws CertificateException, NoSuchAlgorithmException {
        chainWithValidEndCertificate = generateKeyChainIncludingCA(false, false, true, false);
        chainWithExpiredEndCertificate = generateKeyChainIncludingCA(true, false, true, false);
        chainWithInvalidEndCertificate = generateKeyChainIncludingCA(false, false, false, false);
        chainWithMultipleEndCertificates = generateKeyChainIncludingCA(false, true, false, true);
        chainWithValidAndInvalidEndCertificates = generateKeyChainIncludingCA(false, true, true, false);
    }
@Test
void testNeverExpiringX509Certificate() throws Exception {
final KeyPair keyPair = TestSslUtils.generateKeyPair("RSA");
final String dn = "CN=Test, L=London, C=GB";
// Create and initialize data structures
int nrOfCerts = 5;
X509Certificate[] testCerts = new X509Certificate[nrOfCerts];
PublicKey[] signedWith = new PublicKey[nrOfCerts];
boolean[] expectValidEndCert = new boolean[nrOfCerts];
final int days = 1;
// Generate valid certificate
testCerts[0] = TestSslUtils.generateCertificate(dn, keyPair, days, "SHA512withRSA");
// Self-signed
signedWith[0] = testCerts[0].getPublicKey();
expectValidEndCert[0] = true;
// Generate expired, but valid certificate
testCerts[1] = TestSslUtils.generateCertificate(dn, keyPair, -days, "SHA512withRSA");
// Self-signed
signedWith[1] = testCerts[1].getPublicKey();
expectValidEndCert[1] = true;
// Use existing real certificate chain, where the end certificate (the first on
// in the
// chain) is valid
testCerts[2] = chainWithValidEndCertificate[0];
// The end certificate must be signed by the intermediate CA public key
signedWith[2] = chainWithValidEndCertificate[1].getPublicKey();
expectValidEndCert[2] = true;
// Use existing real certificate chain, where the end certificate (the first on
// in the
// chain) is expired
testCerts[3] = chainWithExpiredEndCertificate[0];
// The end certificate must be signed by the intermediate CA public key
signedWith[3] = chainWithExpiredEndCertificate[1].getPublicKey();
expectValidEndCert[3] = true;
// Test with invalid certificate
testCerts[4] = chainWithInvalidEndCertificate[0];
// Check whether this certificate is signed by the intermediate certificate in
// our chain (it is not)
signedWith[4] = chainWithInvalidEndCertificate[1].getPublicKey();
expectValidEndCert[4] = false;
for (int i = 0; i < testCerts.length; i++) {
X509Certificate cert = testCerts[i];
final NeverExpiringX509Certificate wrappedCert = new NeverExpiringX509Certificate(
cert);
// All results must be identically for original as well as wrapped certificate
// class
assertEquals(cert.getCriticalExtensionOIDs(), wrappedCert.getCriticalExtensionOIDs());
final String testOid = "2.5.29.14"; // Should not be in test certificate
assertEquals(cert.getExtensionValue(testOid), wrappedCert.getExtensionValue(testOid));
assertEquals(cert.getNonCriticalExtensionOIDs(),
wrappedCert.getNonCriticalExtensionOIDs());
assertEquals(cert.hasUnsupportedCriticalExtension(),
wrappedCert.hasUnsupportedCriticalExtension());
// We have just generated a valid test certificate, it should still be valid now
assertEquals(cert.getBasicConstraints(), wrappedCert.getBasicConstraints());
assertEquals(cert.getIssuerDN(), wrappedCert.getIssuerDN());
assertEquals(cert.getIssuerUniqueID(), wrappedCert.getIssuerUniqueID());
assertEquals(cert.getKeyUsage(), wrappedCert.getKeyUsage());
assertEquals(cert.getNotAfter(), wrappedCert.getNotAfter());
assertEquals(cert.getNotBefore(), wrappedCert.getNotBefore());
assertEquals(cert.getSerialNumber(), wrappedCert.getSerialNumber());
assertEquals(cert.getSigAlgName(), wrappedCert.getSigAlgName());
assertEquals(cert.getSigAlgOID(), wrappedCert.getSigAlgOID());
assertArrayEquals(cert.getSigAlgParams(), wrappedCert.getSigAlgParams());
assertArrayEquals(cert.getSignature(), wrappedCert.getSignature());
assertEquals(cert.getSubjectDN(), wrappedCert.getSubjectDN());
assertEquals(cert.getSubjectUniqueID(), wrappedCert.getSubjectUniqueID());
assertArrayEquals(cert.getTBSCertificate(), wrappedCert.getTBSCertificate());
assertEquals(cert.getVersion(), wrappedCert.getVersion());
assertArrayEquals(cert.getEncoded(), wrappedCert.getEncoded());
assertEquals(cert.getPublicKey(), wrappedCert.getPublicKey());
assertEquals(cert.toString(), wrappedCert.toString());
final PublicKey signingKey = signedWith[i];
if (expectValidEndCert[i]) {
assertDoesNotThrow(() -> cert.verify(signingKey));
assertDoesNotThrow(() -> wrappedCert.verify(signingKey));
} else {
Exception origException = assertThrows(SignatureException.class, () -> cert.verify(signingKey));
Exception testException = assertThrows(SignatureException.class, () -> wrappedCert.verify(signingKey));
assertEquals(origException.getMessage(), testException.getMessage());
}
// Test timing now, starting with "now"
Date dateNow = new Date();
if (cert.getNotBefore().before(dateNow) && cert.getNotAfter().after(dateNow)) {
assertDoesNotThrow(() -> cert.checkValidity());
} else {
assertThrows(CertificateException.class, cert::checkValidity);
}
// The wrappedCert must never throw due to being expired
assertDoesNotThrow(() -> wrappedCert.checkValidity());
if (cert.getNotBefore().before(dateNow) && cert.getNotAfter().after(dateNow)) {
assertDoesNotThrow(() -> cert.checkValidity(dateNow));
} else {
assertThrows(CertificateException.class, () -> cert.checkValidity(dateNow));
}
// wrapped cert must not throw even if it is expired
assertDoesNotThrow(() -> wrappedCert.checkValidity(dateNow));
// Test with (days/2) before now.
Date dateRecentPast = new Date(System.currentTimeMillis() - days * 12 * 60 * 60 * 1000);
if (cert.getNotBefore().before(dateRecentPast)
&& cert.getNotAfter().after(dateRecentPast)) {
assertDoesNotThrow(() -> cert.checkValidity(dateRecentPast));
assertDoesNotThrow(() -> wrappedCert.checkValidity(dateRecentPast));
} else {
// Cert not valid yet
assertThrows(CertificateException.class,
() -> cert.checkValidity(dateRecentPast));
// The wrappend certificate class does not check dates at all
assertDoesNotThrow(() -> wrappedCert.checkValidity(dateRecentPast));
}
// Test with (days+1) before now. Both certificates were not yet valid, thus
// both checks
// must throw
Date datePast = new Date(System.currentTimeMillis() - (days + 2) * 24 * 60 * 60 * 1000);
assertThrows(CertificateException.class, () -> cert.checkValidity(datePast));
// The wrappend certificate class does not check dates at all
assertDoesNotThrow(() -> wrappedCert.checkValidity(datePast));
// Test with "days+2" after now.
// Cert is not valid anymore. The original class must throw
Date dateFuture = new Date(System.currentTimeMillis() + (days + 2) * 24 * 60 * 60 * 1000);
assertThrows(CertificateException.class, () -> cert.checkValidity(dateFuture));
// This checks the only deviation in behavior of the
// NeverExpiringX509Certificate
// compared to the standard Certificate:
// The NeverExpiringX509Certificate will report any expired certificate as still
// valid
assertDoesNotThrow(() -> wrappedCert.checkValidity(dateFuture));
}
}
private static X509TrustManager getX509TrustManager(TrustManagerFactory tmf) throws Exception {
for (TrustManager trustManager : tmf.getTrustManagers()) {
if (trustManager instanceof X509TrustManager) {
return (X509TrustManager) trustManager;
}
}
throw new Exception("Unable to find X509TrustManager");
}
@Test
public void testCommonNameLoggingTrustManagerFactoryWrapper() throws Exception {
// We need to construct a trust store for testing
X509Certificate caCert = chainWithValidEndCertificate[2];
KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
trustStore.load(null, null);
trustStore.setCertificateEntry("CA", caCert);
String kmfAlgorithm = TrustManagerFactory.getDefaultAlgorithm();
TrustManagerFactory origTmFactory = TrustManagerFactory.getInstance(kmfAlgorithm);
origTmFactory.init(trustStore);
TrustManager[] origTrustManagers = origTmFactory.getTrustManagers();
// Create wrapped trust manager factory
CommonNameLoggingTrustManagerFactoryWrapper testTmFactory = CommonNameLoggingTrustManagerFactoryWrapper.getInstance(kmfAlgorithm);
testTmFactory.init(trustStore);
TrustManager[] wrappendTrustManagers = testTmFactory.getTrustManagers();
// Number of trust managers must be equal (usually "1")
assertEquals(origTrustManagers.length, wrappendTrustManagers.length);
// Algorithms must be equal
assertEquals(origTmFactory.getAlgorithm(), testTmFactory.getAlgorithm());
// Compare trust managers. Only for X509 there must be a difference
for (int i = 0; i < origTrustManagers.length; i++) {
TrustManager origTrustManager = origTrustManagers[i];
TrustManager testTrustManager = wrappendTrustManagers[i];
if (origTrustManager instanceof X509TrustManager) {
assertInstanceOf(CommonNameLoggingTrustManager.class, testTrustManager);
CommonNameLoggingTrustManager commonNameLoggingTrustManager = (CommonNameLoggingTrustManager) testTrustManager;
// Two different instances of X509TrustManager wouldn't be considered equal. Thus we at least check that their classes are equal
assertEquals(origTrustManager.getClass(), commonNameLoggingTrustManager.getOriginalTrustManager().getClass());
} else {
// Two different instances of X509TrustManager wouldn't be considered equal. Thus we at least check that their classes are equal
assertEquals(origTrustManager.getClass(), testTrustManager.getClass());
}
}
}
@Test
public void testCommonNameLoggingTrustManagerValidChain() throws Exception {
X509Certificate endCert = chainWithValidEndCertificate[0];
X509Certificate intermediateCert = chainWithValidEndCertificate[1];
X509Certificate caCert = chainWithValidEndCertificate[2];
X509Certificate[] chainWithoutCa = new X509Certificate[] {endCert, intermediateCert};
KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
trustStore.load(null, null);
trustStore.setCertificateEntry("CA", caCert);
String tmfAlgorithm = TrustManagerFactory.getDefaultAlgorithm();
TrustManagerFactory tmf = TrustManagerFactory.getInstance(tmfAlgorithm);
tmf.init(trustStore);
final X509TrustManager origTrustManager = getX509TrustManager(tmf);
try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(CommonNameLoggingSslEngineFactory.class)) {
int nrOfInitialMessagges = appender.getMessages().size();
CommonNameLoggingTrustManager testTrustManager = new CommonNameLoggingTrustManager(origTrustManager, 2);
// Check client certificate first
assertEquals(testTrustManager.getOriginalTrustManager(), origTrustManager);
assertDoesNotThrow(() -> origTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
assertDoesNotThrow(() -> testTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
assertEquals(nrOfInitialMessagges, appender.getMessages().size());
// Check the same client certificate again. Expect the exact same behavior as before
assertEquals(testTrustManager.getOriginalTrustManager(), origTrustManager);
assertDoesNotThrow(() -> origTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
assertDoesNotThrow(() -> testTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
assertEquals(nrOfInitialMessagges, appender.getMessages().size());
// Check server certificate (no changes here)
assertDoesNotThrow(() -> origTrustManager.checkServerTrusted(chainWithoutCa, "RSA"));
assertDoesNotThrow(() -> testTrustManager.checkServerTrusted(chainWithoutCa, "RSA"));
assertEquals(nrOfInitialMessagges, appender.getMessages().size());
assertArrayEquals(origTrustManager.getAcceptedIssuers(), testTrustManager.getAcceptedIssuers());
}
}
@Test
public void testCommonNameLoggingTrustManagerValidChainWithCA() throws Exception {
    // Present the complete chain (end entity, intermediate CA, root CA) for validation,
    // with the root CA installed in the trust store.
    X509Certificate endCert = chainWithValidEndCertificate[0];
    X509Certificate intermediateCert = chainWithValidEndCertificate[1];
    X509Certificate caCert = chainWithValidEndCertificate[2];
    X509Certificate[] fullChain = new X509Certificate[] {endCert, intermediateCert, caCert};
    KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    trustStore.load(null, null);
    trustStore.setCertificateEntry("CA", caCert);
    TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init(trustStore);
    final X509TrustManager origTrustManager = getX509TrustManager(tmf);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(CommonNameLoggingSslEngineFactory.class)) {
        int initialMessageCount = appender.getMessages().size();
        CommonNameLoggingTrustManager testTrustManager = new CommonNameLoggingTrustManager(origTrustManager, 2);
        assertEquals(testTrustManager.getOriginalTrustManager(), origTrustManager);
        // A valid chain must pass client-side validation for both the original trust
        // manager and the logging wrapper, without emitting any log messages.
        assertDoesNotThrow(() -> origTrustManager.checkClientTrusted(fullChain, "RSA"));
        assertDoesNotThrow(() -> testTrustManager.checkClientTrusted(fullChain, "RSA"));
        assertEquals(initialMessageCount, appender.getMessages().size());
        // The same holds for server-side validation.
        assertDoesNotThrow(() -> origTrustManager.checkServerTrusted(fullChain, "RSA"));
        assertDoesNotThrow(() -> testTrustManager.checkServerTrusted(fullChain, "RSA"));
        assertEquals(initialMessageCount, appender.getMessages().size());
        // The wrapper must delegate getAcceptedIssuers() unchanged.
        assertArrayEquals(origTrustManager.getAcceptedIssuers(), testTrustManager.getAcceptedIssuers());
    }
}
@Test
public void testCommonNameLoggingTrustManagerWithInvalidEndCert() throws Exception {
    // Chain whose end certificate is NOT signed by the trusted CA hierarchy; validation
    // must fail identically for the original trust manager and the logging wrapper.
    X509Certificate endCert = chainWithInvalidEndCertificate[0];
    X509Certificate intermediateCert = chainWithInvalidEndCertificate[1];
    X509Certificate caCert = chainWithInvalidEndCertificate[2];
    X509Certificate[] chainWithoutCa = new X509Certificate[] {endCert, intermediateCert};
    KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    trustStore.load(null, null);
    trustStore.setCertificateEntry("CA", caCert);
    TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init(trustStore);
    final X509TrustManager origTrustManager = getX509TrustManager(tmf);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(CommonNameLoggingSslEngineFactory.class)) {
        int initialMessageCount = appender.getMessages().size();
        CommonNameLoggingTrustManager testTrustManager = new CommonNameLoggingTrustManager(origTrustManager, 2);
        // Validate the client chain twice: the second pass exercises the wrapper's
        // internal cache and must behave exactly like the first.
        for (int attempt = 0; attempt < 2; attempt++) {
            assertEquals(testTrustManager.getOriginalTrustManager(), origTrustManager);
            Exception origException = assertThrows(CertificateException.class,
                () -> origTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
            Exception testException = assertThrows(CertificateException.class,
                () -> testTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
            assertEquals(origException.getMessage(), testException.getMessage());
            assertEquals(initialMessageCount, appender.getMessages().size());
        }
        // Server-side validation must also fail with matching messages and no logging.
        Exception origServerException = assertThrows(CertificateException.class,
            () -> origTrustManager.checkServerTrusted(chainWithoutCa, "RSA"));
        Exception testServerException = assertThrows(CertificateException.class,
            () -> testTrustManager.checkServerTrusted(chainWithoutCa, "RSA"));
        assertEquals(origServerException.getMessage(), testServerException.getMessage());
        assertEquals(initialMessageCount, appender.getMessages().size());
        assertArrayEquals(origTrustManager.getAcceptedIssuers(), testTrustManager.getAcceptedIssuers());
    }
}
@Test
public void testCommonNameLoggingTrustManagerWithExpiredEndCert() throws Exception {
    // Chain with an expired end certificate; the root CA is trusted, but the chain
    // presented for validation deliberately omits it.
    X509Certificate endCert = chainWithExpiredEndCertificate[0];
    X509Certificate intermediateCert = chainWithExpiredEndCertificate[1];
    X509Certificate caCert = chainWithExpiredEndCertificate[2];
    X509Certificate[] chainWithoutCa = new X509Certificate[] {endCert, intermediateCert};
    KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    trustStore.load(null, null);
    trustStore.setCertificateEntry("CA", caCert);
    String tmfAlgorithm = TrustManagerFactory.getDefaultAlgorithm();
    TrustManagerFactory tmf = TrustManagerFactory.getInstance(tmfAlgorithm);
    tmf.init(trustStore);
    final X509TrustManager origTrustManager = getX509TrustManager(tmf);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(CommonNameLoggingTrustManagerFactoryWrapper.class)) {
        int nrOfInitialMessages = appender.getMessages().size();
        CommonNameLoggingTrustManager testTrustManager = new CommonNameLoggingTrustManager(origTrustManager, 2);
        assertEquals(origTrustManager, testTrustManager.getOriginalTrustManager());
        // Call the original method, then the wrapped trust manager, and compare results.
        Exception origException = assertThrows(CertificateException.class,
            () -> origTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
        Exception testException = assertThrows(CertificateException.class,
            () -> testTrustManager.checkClientTrusted(chainWithoutCa, "RSA"));
        assertEquals(origException.getMessage(), testException.getMessage());
        // The wrapper must log exactly one message naming the expired common name.
        List<String> logMessages = appender.getMessages();
        assertEquals(nrOfInitialMessages + 1, logMessages.size());
        assertEquals("Certificate with common name \"" + endCert.getSubjectX500Principal() +
            "\" expired on " + endCert.getNotAfter(), logMessages.get(logMessages.size() - 1));
        // BUGFIX: the baseline exception for server-side validation must come from the
        // ORIGINAL trust manager; previously both assertThrows calls used
        // testTrustManager, so the comparison compared the wrapper with itself.
        origException = assertThrows(CertificateException.class,
            () -> origTrustManager.checkServerTrusted(chainWithoutCa, "RSA"));
        testException = assertThrows(CertificateException.class,
            () -> testTrustManager.checkServerTrusted(chainWithoutCa, "RSA"));
        assertEquals(origException.getMessage(), testException.getMessage());
        // Server-side validation must not produce any additional log messages.
        assertEquals(nrOfInitialMessages + 1, appender.getMessages().size());
        assertArrayEquals(origTrustManager.getAcceptedIssuers(), testTrustManager.getAcceptedIssuers());
    }
}
@Test
public void testCommonNameLoggingTrustManagerWithExpiredEndCertWithCA() throws Exception {
    // Chain with an expired end certificate, this time presenting the FULL chain
    // including the root CA (the local was previously misnamed "chainWithoutCa").
    X509Certificate endCert = chainWithExpiredEndCertificate[0];
    X509Certificate intermediateCert = chainWithExpiredEndCertificate[1];
    X509Certificate caCert = chainWithExpiredEndCertificate[2];
    X509Certificate[] chainWithCa = new X509Certificate[] {endCert, intermediateCert, caCert};
    KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    trustStore.load(null, null);
    trustStore.setCertificateEntry("CA", caCert);
    String tmfAlgorithm = TrustManagerFactory.getDefaultAlgorithm();
    TrustManagerFactory tmf = TrustManagerFactory.getInstance(tmfAlgorithm);
    tmf.init(trustStore);
    final X509TrustManager origTrustManager = getX509TrustManager(tmf);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(CommonNameLoggingTrustManagerFactoryWrapper.class)) {
        int nrOfInitialMessages = appender.getMessages().size();
        CommonNameLoggingTrustManager testTrustManager = new CommonNameLoggingTrustManager(origTrustManager, 2);
        assertEquals(origTrustManager, testTrustManager.getOriginalTrustManager());
        // Call the original method, then the wrapped trust manager, and compare results.
        Exception origException = assertThrows(CertificateException.class,
            () -> origTrustManager.checkClientTrusted(chainWithCa, "RSA"));
        Exception testException = assertThrows(CertificateException.class,
            () -> testTrustManager.checkClientTrusted(chainWithCa, "RSA"));
        assertEquals(origException.getMessage(), testException.getMessage());
        // The wrapper must log exactly one message naming the expired common name.
        List<String> logMessages = appender.getMessages();
        assertEquals(nrOfInitialMessages + 1, logMessages.size());
        assertEquals("Certificate with common name \"" + endCert.getSubjectX500Principal() +
            "\" expired on " + endCert.getNotAfter(), logMessages.get(logMessages.size() - 1));
        // Note: As there are multiple SSLContext created within Kafka, the message may be logged multiple times
        // BUGFIX: the baseline exception for server-side validation must come from the
        // ORIGINAL trust manager; previously both assertThrows calls used
        // testTrustManager, so the comparison compared the wrapper with itself.
        origException = assertThrows(CertificateException.class,
            () -> origTrustManager.checkServerTrusted(chainWithCa, "RSA"));
        testException = assertThrows(CertificateException.class,
            () -> testTrustManager.checkServerTrusted(chainWithCa, "RSA"));
        assertEquals(origException.getMessage(), testException.getMessage());
        // Server-side validation must not produce any additional log messages.
        assertEquals(nrOfInitialMessages + 1, appender.getMessages().size());
        assertArrayEquals(origTrustManager.getAcceptedIssuers(), testTrustManager.getAcceptedIssuers());
    }
}
@Test
public void testCommonNameLoggingTrustManagerMixValidAndInvalidCertificates() throws Exception {
    // Alternate between a valid and an invalid chain to verify that the wrapper's
    // caching never lets the result of one chain leak into the other.
    X509Certificate endCertValid = chainWithValidAndInvalidEndCertificates[0];
    X509Certificate endCertInvalid = chainWithValidAndInvalidEndCertificates[1];
    X509Certificate intermediateCert = chainWithValidAndInvalidEndCertificates[2];
    X509Certificate caCert = chainWithValidAndInvalidEndCertificates[3];
    X509Certificate[] trustedChain = new X509Certificate[] {endCertValid, intermediateCert};
    X509Certificate[] untrustedChain = new X509Certificate[] {endCertInvalid, intermediateCert};
    KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    trustStore.load(null, null);
    trustStore.setCertificateEntry("CA", caCert);
    TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init(trustStore);
    final X509TrustManager origTrustManager = getX509TrustManager(tmf);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(CommonNameLoggingSslEngineFactory.class)) {
        CommonNameLoggingTrustManager testTrustManager = new CommonNameLoggingTrustManager(origTrustManager, 2);
        // Two full valid/invalid rounds, exercising both fresh and cached lookups.
        for (int round = 0; round < 2; round++) {
            assertDoesNotThrow(() -> testTrustManager.checkClientTrusted(trustedChain, "RSA"));
            assertThrows(CertificateException.class,
                () -> testTrustManager.checkClientTrusted(untrustedChain, "RSA"));
        }
        // The valid chain must still be accepted after the invalid one was rejected.
        assertDoesNotThrow(() -> testTrustManager.checkClientTrusted(trustedChain, "RSA"));
    }
}
@Test
public void testSortChainAnWrapEndCertificate() {
    // Null and empty chains must be rejected.
    assertThrows(CertificateException.class,
        () -> CommonNameLoggingTrustManager.sortChainAnWrapEndCertificate(null));
    assertThrows(CertificateException.class,
        () -> CommonNameLoggingTrustManager.sortChainAnWrapEndCertificate(new X509Certificate[0]));
    X509Certificate endCert = chainWithExpiredEndCertificate[0];
    X509Certificate intermediateCert = chainWithExpiredEndCertificate[1];
    X509Certificate caCert = chainWithExpiredEndCertificate[2];
    // A single-certificate chain is returned as-is.
    X509Certificate[] singleCertChain = new X509Certificate[] {endCert};
    X509Certificate[] result = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.sortChainAnWrapEndCertificate(singleCertChain));
    assertEquals(endCert.getSubjectX500Principal(), result[0].getSubjectX500Principal());
    // An already-sorted chain (end certificate first) keeps its order.
    X509Certificate[] sortedInput = new X509Certificate[] {endCert, intermediateCert};
    result = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.sortChainAnWrapEndCertificate(sortedInput));
    assertEquals(endCert.getSubjectX500Principal(), result[0].getSubjectX500Principal());
    assertEquals(intermediateCert.getSubjectX500Principal(), result[1].getSubjectX500Principal());
    // A reversed two-certificate chain is reordered to start with the end certificate.
    X509Certificate[] reversedInput = new X509Certificate[] {intermediateCert, endCert};
    result = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.sortChainAnWrapEndCertificate(reversedInput));
    assertEquals(endCert.getSubjectX500Principal(), result[0].getSubjectX500Principal());
    assertEquals(intermediateCert.getSubjectX500Principal(), result[1].getSubjectX500Principal());
    // A fully reversed three-certificate chain (CA first) is sorted end-to-root.
    X509Certificate[] reversedWithCa = new X509Certificate[] {caCert, intermediateCert, endCert};
    result = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.sortChainAnWrapEndCertificate(reversedWithCa));
    assertEquals(endCert.getSubjectX500Principal(), result[0].getSubjectX500Principal());
    assertEquals(intermediateCert.getSubjectX500Principal(), result[1].getSubjectX500Principal());
    assertEquals(caCert.getSubjectX500Principal(), result[2].getSubjectX500Principal());
}
@Test
public void testSortChainWithMultipleEndCertificate() {
    // A chain containing more than one end certificate is ambiguous and must be rejected.
    assertThrows(
        CertificateException.class,
        () -> CommonNameLoggingTrustManager.sortChainAnWrapEndCertificate(chainWithMultipleEndCertificates));
}
@Test
public void testCalcDigestForCertificateChain() {
    // Digests are deterministic: hashing the same chain twice yields equal values.
    ByteBuffer validDigest = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.calcDigestForCertificateChain(chainWithValidEndCertificate));
    ByteBuffer validDigestRepeat = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.calcDigestForCertificateChain(chainWithValidEndCertificate));
    assertEquals(validDigest, validDigestRepeat);
    // Distinct chains must produce pairwise distinct digests.
    ByteBuffer invalidDigest = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.calcDigestForCertificateChain(chainWithInvalidEndCertificate));
    ByteBuffer expiredDigest = assertDoesNotThrow(
        () -> CommonNameLoggingTrustManager.calcDigestForCertificateChain(chainWithExpiredEndCertificate));
    assertNotEquals(validDigest, invalidDigest);
    assertNotEquals(validDigest, expiredDigest);
    assertNotEquals(invalidDigest, expiredDigest);
}
/**
 * This helper method generates a key chain with one end entity
 * (client/server cert), optionally a second end entity, one intermediate
 * certificate authority and one root certificate authority (self-signed).
 *
 * @param expired whether the generated end certificate(s) should already be expired
 * @param multipleEndCert whether to generate two end certificates instead of one
 * @param endCert0Valid whether the first end certificate is signed by the intermediate
 *                      CA (valid) or self-signed (invalid)
 * @param endCert1Valid like {@code endCert0Valid}, but for the optional second end
 *                      certificate (only used when {@code multipleEndCert} is true)
 * @return the generated chain, ordered end certificate(s) first, then the
 *         intermediate CA, then the root CA last
 * @throws CertificateException if certificate generation fails
 * @throws NoSuchAlgorithmException if a required key or signature algorithm is unavailable
 */
private X509Certificate[] generateKeyChainIncludingCA(boolean expired, boolean multipleEndCert, boolean endCert0Valid, boolean endCert1Valid)
    throws CertificateException, NoSuchAlgorithmException {
    // For testing, we might create another end certificate
    int nrOfCerts = multipleEndCert ? 4 : 3;
    KeyPair[] keyPairs = new KeyPair[nrOfCerts];
    for (int i = 0; i < nrOfCerts; i++) {
        keyPairs[i] = TestSslUtils.generateKeyPair("RSA");
    }
    X509Certificate[] certs = new X509Certificate[nrOfCerts];
    int endCertDaysValidBeforeNow = 1;
    // If using 0 or a negative value, the generated certificate will be expired
    int endCertDaysValidAfterNow = expired ? 0 : 1;
    // Generate root CA (self-signed, stored at the last index of the chain)
    int caIndex = nrOfCerts - 1;
    certs[caIndex] = TestSslUtils.generateSignedCertificate("CN=CA", keyPairs[caIndex], 365,
        365, null, null, "SHA512withRSA", true, false, false);
    // Generate the intermediate CA, signed by the root CA
    int intermediateCertIndex = caIndex - 1;
    certs[intermediateCertIndex] = TestSslUtils.generateSignedCertificate("CN=Intermediate CA",
        keyPairs[intermediateCertIndex], 365, 365, certs[caIndex].getSubjectX500Principal().getName(), keyPairs[caIndex],
        "SHA512withRSA", true, false, false);
    for (int currIndex = intermediateCertIndex - 1; currIndex >= 0; currIndex--) {
        // When generating multiple end certificates, each one may independently be
        // valid or invalid, controlled by endCert0Valid / endCert1Valid
        boolean endCertValid = (currIndex == 0) ? endCert0Valid : endCert1Valid;
        if (endCertValid) {
            // Generate a valid end certificate, i.e. one that is signed by our intermediate
            // CA
            certs[currIndex] = TestSslUtils.generateSignedCertificate("CN=kafka", keyPairs[currIndex],
                endCertDaysValidBeforeNow, endCertDaysValidAfterNow,
                certs[intermediateCertIndex].getSubjectX500Principal().getName(), keyPairs[intermediateCertIndex], "SHA512withRSA", false, true, true);
        } else {
            // Generate an invalid end certificate, by creating a self-signed one.
            certs[currIndex] = TestSslUtils.generateSignedCertificate("C=GB, L=London, CN=kafka", keyPairs[currIndex],
                endCertDaysValidBeforeNow, endCertDaysValidAfterNow,
                null, null, "SHA512withRSA", false, true, true);
        }
    }
    return certs;
}
}
|
googleapis/google-cloud-java | 36,020 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/InfoType.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/storage.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Type of information detected by the API.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.InfoType}
*/
public final class InfoType extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.InfoType)
InfoTypeOrBuilder {
private static final long serialVersionUID = 0L;
// Use InfoType.newBuilder() to construct.
private InfoType(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InfoType() {
name_ = "";
version_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new InfoType();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpStorage
.internal_static_google_privacy_dlp_v2_InfoType_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpStorage
.internal_static_google_privacy_dlp_v2_InfoType_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.InfoType.class,
com.google.privacy.dlp.v2.InfoType.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Name of the information type. Either a name of your choosing when
* creating a CustomInfoType, or one of the names listed
* at
* https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
* when specifying a built-in type. When sending Cloud DLP results to Data
* Catalog, infoType names should conform to the pattern
* `[A-Za-z0-9$_-]{1,64}`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the information type. Either a name of your choosing when
* creating a CustomInfoType, or one of the names listed
* at
* https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
* when specifying a built-in type. When sending Cloud DLP results to Data
* Catalog, infoType names should conform to the pattern
* `[A-Za-z0-9$_-]{1,64}`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VERSION_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object version_ = "";
/**
*
*
* <pre>
* Optional version name for this InfoType.
* </pre>
*
* <code>string version = 2;</code>
*
* @return The version.
*/
@java.lang.Override
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional version name for this InfoType.
* </pre>
*
* <code>string version = 2;</code>
*
* @return The bytes for version.
*/
@java.lang.Override
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SENSITIVITY_SCORE_FIELD_NUMBER = 3;
private com.google.privacy.dlp.v2.SensitivityScore sensitivityScore_;
/**
*
*
* <pre>
* Optional custom sensitivity for this InfoType.
* This only applies to data profiling.
* </pre>
*
* <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
*
* @return Whether the sensitivityScore field is set.
*/
@java.lang.Override
public boolean hasSensitivityScore() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional custom sensitivity for this InfoType.
* This only applies to data profiling.
* </pre>
*
* <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
*
* @return The sensitivityScore.
*/
@java.lang.Override
public com.google.privacy.dlp.v2.SensitivityScore getSensitivityScore() {
return sensitivityScore_ == null
? com.google.privacy.dlp.v2.SensitivityScore.getDefaultInstance()
: sensitivityScore_;
}
/**
*
*
* <pre>
* Optional custom sensitivity for this InfoType.
* This only applies to data profiling.
* </pre>
*
* <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
*/
@java.lang.Override
public com.google.privacy.dlp.v2.SensitivityScoreOrBuilder getSensitivityScoreOrBuilder() {
return sensitivityScore_ == null
? com.google.privacy.dlp.v2.SensitivityScore.getDefaultInstance()
: sensitivityScore_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getSensitivityScore());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getSensitivityScore());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.privacy.dlp.v2.InfoType)) {
return super.equals(obj);
}
com.google.privacy.dlp.v2.InfoType other = (com.google.privacy.dlp.v2.InfoType) obj;
if (!getName().equals(other.getName())) return false;
if (!getVersion().equals(other.getVersion())) return false;
if (hasSensitivityScore() != other.hasSensitivityScore()) return false;
if (hasSensitivityScore()) {
if (!getSensitivityScore().equals(other.getSensitivityScore())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVersion().hashCode();
if (hasSensitivityScore()) {
hash = (37 * hash) + SENSITIVITY_SCORE_FIELD_NUMBER;
hash = (53 * hash) + getSensitivityScore().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.InfoType parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.InfoType parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.InfoType parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.privacy.dlp.v2.InfoType prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Type of information detected by the API.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.InfoType}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.InfoType)
com.google.privacy.dlp.v2.InfoTypeOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpStorage
.internal_static_google_privacy_dlp_v2_InfoType_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpStorage
.internal_static_google_privacy_dlp_v2_InfoType_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.InfoType.class,
com.google.privacy.dlp.v2.InfoType.Builder.class);
}
// Construct using com.google.privacy.dlp.v2.InfoType.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSensitivityScoreFieldBuilder();
}
}
    // Resets all three fields (name, version, sensitivity_score) to defaults,
    // clears the presence bits, and releases the nested sub-builder if one exists.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      version_ = "";
      sensitivityScore_ = null;
      if (sensitivityScoreBuilder_ != null) {
        sensitivityScoreBuilder_.dispose();
        sensitivityScoreBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpStorage
          .internal_static_google_privacy_dlp_v2_InfoType_descriptor;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.InfoType getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.InfoType.getDefaultInstance();
    }
    // Builds the message; InfoType has no required fields, so isInitialized()
    // always returns true and the uninitialized branch is effectively dead.
    @java.lang.Override
    public com.google.privacy.dlp.v2.InfoType build() {
      com.google.privacy.dlp.v2.InfoType result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.InfoType buildPartial() {
      com.google.privacy.dlp.v2.InfoType result = new com.google.privacy.dlp.v2.InfoType(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set in this builder into
    // the freshly constructed message, translating builder bits (0x1/0x2/0x4)
    // into the message's bit layout (only sensitivity_score tracks presence there).
    private void buildPartial0(com.google.privacy.dlp.v2.InfoType result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.version_ = version_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.sensitivityScore_ =
            sensitivityScoreBuilder_ == null ? sensitivityScore_ : sensitivityScoreBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic-dispatch merge: routes to the typed overload when possible,
    // otherwise falls back to reflective field-by-field merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.InfoType) {
        return mergeFrom((com.google.privacy.dlp.v2.InfoType) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: non-empty strings and a present sensitivity_score in `other`
    // overwrite/merge into this builder; empty/absent fields are left untouched.
    public Builder mergeFrom(com.google.privacy.dlp.v2.InfoType other) {
      if (other == com.google.privacy.dlp.v2.InfoType.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getVersion().isEmpty()) {
        version_ = other.version_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasSensitivityScore()) {
        mergeSensitivityScore(other.getSensitivityScore());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: dispatches on the field tag (10 = name, 18 =
    // version, 26 = sensitivity_score) and preserves unrecognized fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                version_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(
                    getSensitivityScoreFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = name, 0x2 = version, 0x4 = sensitivity_score.
    private int bitField0_;
    // Holds either a String or a ByteString; decoded lazily in getName().
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Name of the information type. Either a name of your choosing when
     * creating a CustomInfoType, or one of the names listed
     * at
     * https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
     * when specifying a built-in type. When sending Cloud DLP results to Data
     * Catalog, infoType names should conform to the pattern
     * `[A-Za-z0-9$_-]{1,64}`.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later reads skip the UTF-8 conversion.
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the information type. Either a name of your choosing when
     * creating a CustomInfoType, or one of the names listed
     * at
     * https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
     * when specifying a built-in type. When sending Cloud DLP results to Data
     * Catalog, infoType names should conform to the pattern
     * `[A-Za-z0-9$_-]{1,64}`.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString (mirror of getName()'s caching).
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the information type. Either a name of your choosing when
     * creating a CustomInfoType, or one of the names listed
     * at
     * https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
     * when specifying a built-in type. When sending Cloud DLP results to Data
     * Catalog, infoType names should conform to the pattern
     * `[A-Za-z0-9$_-]{1,64}`.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the information type. Either a name of your choosing when
     * creating a CustomInfoType, or one of the names listed
     * at
     * https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
     * when specifying a built-in type. When sending Cloud DLP results to Data
     * Catalog, infoType names should conform to the pattern
     * `[A-Za-z0-9$_-]{1,64}`.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the information type. Either a name of your choosing when
     * creating a CustomInfoType, or one of the names listed
     * at
     * https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
     * when specifying a built-in type. When sending Cloud DLP results to Data
     * Catalog, infoType names should conform to the pattern
     * `[A-Za-z0-9$_-]{1,64}`.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 up front so the wire format stays well-formed.
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Holds either a String or a ByteString; decoded lazily, same scheme as name_.
    private java.lang.Object version_ = "";
    /**
     *
     *
     * <pre>
     * Optional version name for this InfoType.
     * </pre>
     *
     * <code>string version = 2;</code>
     *
     * @return The version.
     */
    public java.lang.String getVersion() {
      java.lang.Object ref = version_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String for subsequent reads.
        version_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional version name for this InfoType.
     * </pre>
     *
     * <code>string version = 2;</code>
     *
     * @return The bytes for version.
     */
    public com.google.protobuf.ByteString getVersionBytes() {
      java.lang.Object ref = version_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString form.
        version_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional version name for this InfoType.
     * </pre>
     *
     * <code>string version = 2;</code>
     *
     * @param value The version to set.
     * @return This builder for chaining.
     */
    public Builder setVersion(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      version_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional version name for this InfoType.
     * </pre>
     *
     * <code>string version = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearVersion() {
      version_ = getDefaultInstance().getVersion();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional version name for this InfoType.
     * </pre>
     *
     * <code>string version = 2;</code>
     *
     * @param value The bytes for version to set.
     * @return This builder for chaining.
     */
    public Builder setVersionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 up front so the wire format stays well-formed.
      checkByteStringIsUtf8(value);
      version_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Message field state: exactly one of these two holds the current value.
    // While sensitivityScoreBuilder_ is null, sensitivityScore_ is authoritative;
    // once the field builder is created, it takes over and sensitivityScore_ is nulled.
    private com.google.privacy.dlp.v2.SensitivityScore sensitivityScore_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.SensitivityScore,
            com.google.privacy.dlp.v2.SensitivityScore.Builder,
            com.google.privacy.dlp.v2.SensitivityScoreOrBuilder>
        sensitivityScoreBuilder_;
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     *
     * @return Whether the sensitivityScore field is set.
     */
    public boolean hasSensitivityScore() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     *
     * @return The sensitivityScore.
     */
    public com.google.privacy.dlp.v2.SensitivityScore getSensitivityScore() {
      if (sensitivityScoreBuilder_ == null) {
        return sensitivityScore_ == null
            ? com.google.privacy.dlp.v2.SensitivityScore.getDefaultInstance()
            : sensitivityScore_;
      } else {
        return sensitivityScoreBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     */
    public Builder setSensitivityScore(com.google.privacy.dlp.v2.SensitivityScore value) {
      if (sensitivityScoreBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        sensitivityScore_ = value;
      } else {
        sensitivityScoreBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     */
    public Builder setSensitivityScore(
        com.google.privacy.dlp.v2.SensitivityScore.Builder builderForValue) {
      if (sensitivityScoreBuilder_ == null) {
        sensitivityScore_ = builderForValue.build();
      } else {
        sensitivityScoreBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     */
    public Builder mergeSensitivityScore(com.google.privacy.dlp.v2.SensitivityScore value) {
      if (sensitivityScoreBuilder_ == null) {
        // Reference comparison against the default-instance singleton is the
        // generated-code idiom for "field already holds a real value".
        if (((bitField0_ & 0x00000004) != 0)
            && sensitivityScore_ != null
            && sensitivityScore_
                != com.google.privacy.dlp.v2.SensitivityScore.getDefaultInstance()) {
          getSensitivityScoreBuilder().mergeFrom(value);
        } else {
          sensitivityScore_ = value;
        }
      } else {
        sensitivityScoreBuilder_.mergeFrom(value);
      }
      if (sensitivityScore_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     */
    public Builder clearSensitivityScore() {
      bitField0_ = (bitField0_ & ~0x00000004);
      sensitivityScore_ = null;
      if (sensitivityScoreBuilder_ != null) {
        sensitivityScoreBuilder_.dispose();
        sensitivityScoreBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     */
    public com.google.privacy.dlp.v2.SensitivityScore.Builder getSensitivityScoreBuilder() {
      // Marks the field as set: handing out a mutable sub-builder implies presence.
      bitField0_ |= 0x00000004;
      onChanged();
      return getSensitivityScoreFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     */
    public com.google.privacy.dlp.v2.SensitivityScoreOrBuilder getSensitivityScoreOrBuilder() {
      if (sensitivityScoreBuilder_ != null) {
        return sensitivityScoreBuilder_.getMessageOrBuilder();
      } else {
        return sensitivityScore_ == null
            ? com.google.privacy.dlp.v2.SensitivityScore.getDefaultInstance()
            : sensitivityScore_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional custom sensitivity for this InfoType.
     * This only applies to data profiling.
     * </pre>
     *
     * <code>.google.privacy.dlp.v2.SensitivityScore sensitivity_score = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.SensitivityScore,
            com.google.privacy.dlp.v2.SensitivityScore.Builder,
            com.google.privacy.dlp.v2.SensitivityScoreOrBuilder>
        getSensitivityScoreFieldBuilder() {
      // Lazily creates the field builder and transfers ownership of the current
      // value into it (sensitivityScore_ is nulled once the builder exists).
      if (sensitivityScoreBuilder_ == null) {
        sensitivityScoreBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.privacy.dlp.v2.SensitivityScore,
                com.google.privacy.dlp.v2.SensitivityScore.Builder,
                com.google.privacy.dlp.v2.SensitivityScoreOrBuilder>(
                getSensitivityScore(), getParentForChildren(), isClean());
        sensitivityScore_ = null;
      }
      return sensitivityScoreBuilder_;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.InfoType)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.InfoType)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.privacy.dlp.v2.InfoType DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.InfoType();
  }
  public static com.google.privacy.dlp.v2.InfoType getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom and attaches the partially
  // parsed message to any parse failure so callers can inspect it.
  private static final com.google.protobuf.Parser<InfoType> PARSER =
      new com.google.protobuf.AbstractParser<InfoType>() {
        @java.lang.Override
        public InfoType parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<InfoType> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<InfoType> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.privacy.dlp.v2.InfoType getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto
// Protobuf Java Version: 3.25.8
package com.google.recaptchaenterprise.v1;
/**
*
*
* <pre>
* Policy config assessment.
* </pre>
*
* Protobuf type {@code google.cloud.recaptchaenterprise.v1.FirewallPolicyAssessment}
*/
public final class FirewallPolicyAssessment extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1.FirewallPolicyAssessment)
FirewallPolicyAssessmentOrBuilder {
private static final long serialVersionUID = 0L;
  // Use FirewallPolicyAssessment.newBuilder() to construct.
  private FirewallPolicyAssessment(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private FirewallPolicyAssessment() {}
  // Invoked reflectively by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new FirewallPolicyAssessment();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_FirewallPolicyAssessment_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_FirewallPolicyAssessment_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.recaptchaenterprise.v1.FirewallPolicyAssessment.class,
            com.google.recaptchaenterprise.v1.FirewallPolicyAssessment.Builder.class);
  }
  // Presence bits: 0x1 = error, 0x2 = firewall_policy.
  private int bitField0_;
  public static final int ERROR_FIELD_NUMBER = 5;
  private com.google.rpc.Status error_;
  /**
   *
   *
   * <pre>
   * Output only. If the processing of a policy config fails, an error is
   * populated and the firewall_policy is left empty.
   * </pre>
   *
   * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return Whether the error field is set.
   */
  @java.lang.Override
  public boolean hasError() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Output only. If the processing of a policy config fails, an error is
   * populated and the firewall_policy is left empty.
   * </pre>
   *
   * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The error.
   */
  @java.lang.Override
  public com.google.rpc.Status getError() {
    // Never returns null: falls back to the Status default instance when unset.
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  }
  /**
   *
   *
   * <pre>
   * Output only. If the processing of a policy config fails, an error is
   * populated and the firewall_policy is left empty.
   * </pre>
   *
   * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  }
  public static final int FIREWALL_POLICY_FIELD_NUMBER = 8;
  private com.google.recaptchaenterprise.v1.FirewallPolicy firewallPolicy_;
  /**
   *
   *
   * <pre>
   * Output only. The policy that matched the request. If more than one policy
   * may match, this is the first match. If no policy matches the incoming
   * request, the policy field is left empty.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return Whether the firewallPolicy field is set.
   */
  @java.lang.Override
  public boolean hasFirewallPolicy() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Output only. The policy that matched the request. If more than one policy
   * may match, this is the first match. If no policy matches the incoming
   * request, the policy field is left empty.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The firewallPolicy.
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.FirewallPolicy getFirewallPolicy() {
    // Never returns null: falls back to the FirewallPolicy default instance when unset.
    return firewallPolicy_ == null
        ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
        : firewallPolicy_;
  }
  /**
   *
   *
   * <pre>
   * Output only. The policy that matched the request. If more than one policy
   * may match, this is the first match. If no policy matches the incoming
   * request, the policy field is left empty.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder getFirewallPolicyOrBuilder() {
    return firewallPolicy_ == null
        ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
        : firewallPolicy_;
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bits are set, then trailing
  // unknown fields, preserving round-trip fidelity.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(5, getError());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(8, getFirewallPolicy());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getError());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getFirewallPolicy());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over presence bits, both message fields, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.recaptchaenterprise.v1.FirewallPolicyAssessment)) {
      return super.equals(obj);
    }
    com.google.recaptchaenterprise.v1.FirewallPolicyAssessment other =
        (com.google.recaptchaenterprise.v1.FirewallPolicyAssessment) obj;
    if (hasError() != other.hasError()) return false;
    if (hasError()) {
      if (!getError().equals(other.getError())) return false;
    }
    if (hasFirewallPolicy() != other.hasFirewallPolicy()) return false;
    if (hasFirewallPolicy()) {
      if (!getFirewallPolicy().equals(other.getFirewallPolicy())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes field numbers with field hashes; memoized after first computation
  // (consistent with equals above).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasError()) {
      hash = (37 * hash) + ERROR_FIELD_NUMBER;
      hash = (53 * hash) + getError().hashCode();
    }
    if (hasFirewallPolicy()) {
      hash = (37 * hash) + FIREWALL_POLICY_FIELD_NUMBER;
      hash = (53 * hash) + getFirewallPolicy().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads; all delegate to PARSER or to the
  // GeneratedMessageV3 stream helpers.
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; toBuilder() avoids a redundant merge for the
  // default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.recaptchaenterprise.v1.FirewallPolicyAssessment prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Policy config assessment.
* </pre>
*
* Protobuf type {@code google.cloud.recaptchaenterprise.v1.FirewallPolicyAssessment}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1.FirewallPolicyAssessment)
com.google.recaptchaenterprise.v1.FirewallPolicyAssessmentOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_FirewallPolicyAssessment_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_FirewallPolicyAssessment_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.recaptchaenterprise.v1.FirewallPolicyAssessment.class,
              com.google.recaptchaenterprise.v1.FirewallPolicyAssessment.Builder.class);
    }
    // Construct using com.google.recaptchaenterprise.v1.FirewallPolicyAssessment.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the message-field builders when the runtime requires it
    // (alwaysUseFieldBuilders); otherwise they are created lazily on demand.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getErrorFieldBuilder();
        getFirewallPolicyFieldBuilder();
      }
    }
    // Resets both message fields and their presence bits, disposing any
    // outstanding sub-builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      error_ = null;
      if (errorBuilder_ != null) {
        errorBuilder_.dispose();
        errorBuilder_ = null;
      }
      firewallPolicy_ = null;
      if (firewallPolicyBuilder_ != null) {
        firewallPolicyBuilder_.dispose();
        firewallPolicyBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_FirewallPolicyAssessment_descriptor;
    }
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.FirewallPolicyAssessment getDefaultInstanceForType() {
      return com.google.recaptchaenterprise.v1.FirewallPolicyAssessment.getDefaultInstance();
    }
    // No required fields exist, so isInitialized() is always true and the
    // uninitialized branch is effectively dead.
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.FirewallPolicyAssessment build() {
      com.google.recaptchaenterprise.v1.FirewallPolicyAssessment result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.FirewallPolicyAssessment buildPartial() {
      com.google.recaptchaenterprise.v1.FirewallPolicyAssessment result =
          new com.google.recaptchaenterprise.v1.FirewallPolicyAssessment(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields into the new message, preferring the sub-builder's
    // value when one exists, and mirrors the presence bits into the message.
    private void buildPartial0(com.google.recaptchaenterprise.v1.FirewallPolicyAssessment result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.error_ = errorBuilder_ == null ? error_ : errorBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.firewallPolicy_ =
            firewallPolicyBuilder_ == null ? firewallPolicy_ : firewallPolicyBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic-dispatch merge: routes to the typed overload when possible,
    // otherwise falls back to reflective field-by-field merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.recaptchaenterprise.v1.FirewallPolicyAssessment) {
        return mergeFrom((com.google.recaptchaenterprise.v1.FirewallPolicyAssessment) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: only fields present in `other` are merged into this builder.
    public Builder mergeFrom(com.google.recaptchaenterprise.v1.FirewallPolicyAssessment other) {
      if (other == com.google.recaptchaenterprise.v1.FirewallPolicyAssessment.getDefaultInstance())
        return this;
      if (other.hasError()) {
        mergeError(other.getError());
      }
      if (other.hasFirewallPolicy()) {
        mergeFirewallPolicy(other.getFirewallPolicy());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 42 = error (field 5), tag 66 =
    // firewall_policy (field 8); unrecognized fields are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 42:
              {
                input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 42
            case 66:
              {
                input.readMessage(getFirewallPolicyFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 66
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = error, 0x2 = firewall_policy.
    private int bitField0_;
    // Field 5 ("error"): error_ backs the value until getErrorFieldBuilder() is first
    // called; after that errorBuilder_ owns the value and error_ is nulled out.
    private com.google.rpc.Status error_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        errorBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return Whether the error field is set.
     */
    public boolean hasError() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The error.
     */
    public com.google.rpc.Status getError() {
      if (errorBuilder_ == null) {
        return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
      } else {
        return errorBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setError(com.google.rpc.Status value) {
      if (errorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        error_ = value;
      } else {
        errorBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setError(com.google.rpc.Status.Builder builderForValue) {
      if (errorBuilder_ == null) {
        error_ = builderForValue.build();
      } else {
        errorBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder mergeError(com.google.rpc.Status value) {
      if (errorBuilder_ == null) {
        // Merge into the existing value only when one is already set and non-default;
        // otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && error_ != null
            && error_ != com.google.rpc.Status.getDefaultInstance()) {
          getErrorBuilder().mergeFrom(value);
        } else {
          error_ = value;
        }
      } else {
        errorBuilder_.mergeFrom(value);
      }
      // When the builder path was taken, error_ is null and the builder itself tracks
      // presence, so the bit/onChanged here apply only to the plain-field path.
      if (error_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder clearError() {
      bitField0_ = (bitField0_ & ~0x00000001);
      error_ = null;
      if (errorBuilder_ != null) {
        errorBuilder_.dispose();
        errorBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.rpc.Status.Builder getErrorBuilder() {
      // Handing out a mutable builder implies the field is now "set".
      bitField0_ |= 0x00000001;
      onChanged();
      return getErrorFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
      if (errorBuilder_ != null) {
        return errorBuilder_.getMessageOrBuilder();
      } else {
        return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. If the processing of a policy config fails, an error is
     * populated and the firewall_policy is left empty.
     * </pre>
     *
     * <code>.google.rpc.Status error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getErrorFieldBuilder() {
      // Lazily switches this field from plain-value storage to builder-backed storage.
      if (errorBuilder_ == null) {
        errorBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.rpc.Status,
                com.google.rpc.Status.Builder,
                com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean());
        error_ = null;
      }
      return errorBuilder_;
    }
    // Field 8 ("firewall_policy"): same lazy value-vs-builder storage scheme as "error".
    private com.google.recaptchaenterprise.v1.FirewallPolicy firewallPolicy_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.recaptchaenterprise.v1.FirewallPolicy,
            com.google.recaptchaenterprise.v1.FirewallPolicy.Builder,
            com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder>
        firewallPolicyBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return Whether the firewallPolicy field is set.
     */
    public boolean hasFirewallPolicy() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The firewallPolicy.
     */
    public com.google.recaptchaenterprise.v1.FirewallPolicy getFirewallPolicy() {
      if (firewallPolicyBuilder_ == null) {
        return firewallPolicy_ == null
            ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
            : firewallPolicy_;
      } else {
        return firewallPolicyBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setFirewallPolicy(com.google.recaptchaenterprise.v1.FirewallPolicy value) {
      if (firewallPolicyBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        firewallPolicy_ = value;
      } else {
        firewallPolicyBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setFirewallPolicy(
        com.google.recaptchaenterprise.v1.FirewallPolicy.Builder builderForValue) {
      if (firewallPolicyBuilder_ == null) {
        firewallPolicy_ = builderForValue.build();
      } else {
        firewallPolicyBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeFirewallPolicy(com.google.recaptchaenterprise.v1.FirewallPolicy value) {
      if (firewallPolicyBuilder_ == null) {
        // Recursively merge only into an already-set, non-default value; otherwise adopt
        // the incoming message wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && firewallPolicy_ != null
            && firewallPolicy_
                != com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()) {
          getFirewallPolicyBuilder().mergeFrom(value);
        } else {
          firewallPolicy_ = value;
        }
      } else {
        firewallPolicyBuilder_.mergeFrom(value);
      }
      // Null when the builder path was taken above; presence is then tracked by the builder.
      if (firewallPolicy_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder clearFirewallPolicy() {
      bitField0_ = (bitField0_ & ~0x00000002);
      firewallPolicy_ = null;
      if (firewallPolicyBuilder_ != null) {
        firewallPolicyBuilder_.dispose();
        firewallPolicyBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.recaptchaenterprise.v1.FirewallPolicy.Builder getFirewallPolicyBuilder() {
      // Handing out a mutable builder implies the field is now "set".
      bitField0_ |= 0x00000002;
      onChanged();
      return getFirewallPolicyFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder getFirewallPolicyOrBuilder() {
      if (firewallPolicyBuilder_ != null) {
        return firewallPolicyBuilder_.getMessageOrBuilder();
      } else {
        return firewallPolicy_ == null
            ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
            : firewallPolicy_;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The policy that matched the request. If more than one policy
     * may match, this is the first match. If no policy matches the incoming
     * request, the policy field is left empty.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.recaptchaenterprise.v1.FirewallPolicy,
            com.google.recaptchaenterprise.v1.FirewallPolicy.Builder,
            com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder>
        getFirewallPolicyFieldBuilder() {
      // Lazily switches this field from plain-value storage to builder-backed storage.
      if (firewallPolicyBuilder_ == null) {
        firewallPolicyBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.recaptchaenterprise.v1.FirewallPolicy,
                com.google.recaptchaenterprise.v1.FirewallPolicy.Builder,
                com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder>(
                getFirewallPolicy(), getParentForChildren(), isClean());
        firewallPolicy_ = null;
      }
      return firewallPolicyBuilder_;
    }
    // Unknown-field plumbing: delegates to the superclass, narrowing the return type.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1.FirewallPolicyAssessment)
}
// @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1.FirewallPolicyAssessment)
  // Shared singleton default (all fields unset) instance.
  private static final com.google.recaptchaenterprise.v1.FirewallPolicyAssessment DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1.FirewallPolicyAssessment();
  }
  public static com.google.recaptchaenterprise.v1.FirewallPolicyAssessment getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom; on failure it attaches the partially
  // built message to the exception so callers can inspect what was read.
  private static final com.google.protobuf.Parser<FirewallPolicyAssessment> PARSER =
      new com.google.protobuf.AbstractParser<FirewallPolicyAssessment>() {
        @java.lang.Override
        public FirewallPolicyAssessment parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<FirewallPolicyAssessment> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<FirewallPolicyAssessment> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.FirewallPolicyAssessment getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* The configuration of access to the Kafka cluster.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.AccessConfig}
*/
public final class AccessConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.AccessConfig)
AccessConfigOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use AccessConfig.newBuilder() to construct.
  private AccessConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance; repeated field starts empty.
  private AccessConfig() {
    networkConfigs_ = java.util.Collections.emptyList();
  }
  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AccessConfig();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.managedkafka.v1.ResourcesProto
        .internal_static_google_cloud_managedkafka_v1_AccessConfig_descriptor;
  }
  // Binds the generated descriptor to this class/Builder pair for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.managedkafka.v1.ResourcesProto
        .internal_static_google_cloud_managedkafka_v1_AccessConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.managedkafka.v1.AccessConfig.class,
            com.google.cloud.managedkafka.v1.AccessConfig.Builder.class);
  }
  public static final int NETWORK_CONFIGS_FIELD_NUMBER = 1;
  // Immutable snapshot list built by the Builder; never null after construction.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.managedkafka.v1.NetworkConfig> networkConfigs_;
  /**
   *
   *
   * <pre>
   * Required. Virtual Private Cloud (VPC) networks that must be granted direct
   * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
   * networks can be specified.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.managedkafka.v1.NetworkConfig> getNetworkConfigsList() {
    return networkConfigs_;
  }
  /**
   *
   *
   * <pre>
   * Required. Virtual Private Cloud (VPC) networks that must be granted direct
   * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
   * networks can be specified.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.managedkafka.v1.NetworkConfigOrBuilder>
      getNetworkConfigsOrBuilderList() {
    return networkConfigs_;
  }
  /**
   *
   *
   * <pre>
   * Required. Virtual Private Cloud (VPC) networks that must be granted direct
   * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
   * networks can be specified.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public int getNetworkConfigsCount() {
    return networkConfigs_.size();
  }
  /**
   *
   *
   * <pre>
   * Required. Virtual Private Cloud (VPC) networks that must be granted direct
   * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
   * networks can be specified.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.NetworkConfig getNetworkConfigs(int index) {
    return networkConfigs_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Required. Virtual Private Cloud (VPC) networks that must be granted direct
   * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
   * networks can be specified.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.NetworkConfigOrBuilder getNetworkConfigsOrBuilder(
      int index) {
    return networkConfigs_.get(index);
  }
  // Memoized initialization check: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // proto3: no required fields, so always initialized once checked.
    memoizedIsInitialized = 1;
    return true;
  }
  // Wire serialization: each NetworkConfig as field 1, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < networkConfigs_.size(); i++) {
      output.writeMessage(1, networkConfigs_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Serialized size in bytes, memoized after the first computation (message is immutable).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < networkConfigs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, networkConfigs_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.managedkafka.v1.AccessConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.managedkafka.v1.AccessConfig other =
        (com.google.cloud.managedkafka.v1.AccessConfig) obj;
    if (!getNetworkConfigsList().equals(other.getNetworkConfigsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals; memoized (0 doubles as the "not computed" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getNetworkConfigsCount() > 0) {
      hash = (37 * hash) + NETWORK_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getNetworkConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite.
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message body.
  public static com.google.cloud.managedkafka.v1.AccessConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.managedkafka.v1.AccessConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.managedkafka.v1.AccessConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // The default instance yields a fresh empty Builder; any other instance is copied in.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The configuration of access to the Kafka cluster.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.AccessConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.AccessConfig)
com.google.cloud.managedkafka.v1.AccessConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_AccessConfig_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_AccessConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.managedkafka.v1.AccessConfig.class,
              com.google.cloud.managedkafka.v1.AccessConfig.Builder.class);
    }
    // Construct using com.google.cloud.managedkafka.v1.AccessConfig.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to their defaults; reuses the field builder when one exists.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (networkConfigsBuilder_ == null) {
        networkConfigs_ = java.util.Collections.emptyList();
      } else {
        networkConfigs_ = null;
        networkConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_AccessConfig_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.managedkafka.v1.AccessConfig getDefaultInstanceForType() {
      return com.google.cloud.managedkafka.v1.AccessConfig.getDefaultInstance();
    }
    // build() enforces initialization; buildPartial() does not.
    @java.lang.Override
    public com.google.cloud.managedkafka.v1.AccessConfig build() {
      com.google.cloud.managedkafka.v1.AccessConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.managedkafka.v1.AccessConfig buildPartial() {
      com.google.cloud.managedkafka.v1.AccessConfig result =
          new com.google.cloud.managedkafka.v1.AccessConfig(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes the repeated field: an unbuilt list is wrapped unmodifiable and handed to
    // the message (the mutable bit is cleared so later edits re-copy it).
    private void buildPartialRepeatedFields(com.google.cloud.managedkafka.v1.AccessConfig result) {
      if (networkConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          networkConfigs_ = java.util.Collections.unmodifiableList(networkConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.networkConfigs_ = networkConfigs_;
      } else {
        result.networkConfigs_ = networkConfigsBuilder_.build();
      }
    }
    // No singular fields on this message; empty body kept by the generator for
    // structural uniformity across message types.
    private void buildPartial0(com.google.cloud.managedkafka.v1.AccessConfig result) {
      int from_bitField0_ = bitField0_;
    }
    // Standard generated pass-throughs to GeneratedMessageV3.Builder; overridden only so
    // the concrete Builder type is returned for fluent chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the type-specific merge when possible; otherwise falls back to the
    // reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.managedkafka.v1.AccessConfig) {
        return mergeFrom((com.google.cloud.managedkafka.v1.AccessConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Appends other's network configs. When this builder's list is still empty it adopts
    // other's (immutable) list by reference and defers copying until a mutation occurs.
    public Builder mergeFrom(com.google.cloud.managedkafka.v1.AccessConfig other) {
      if (other == com.google.cloud.managedkafka.v1.AccessConfig.getDefaultInstance()) return this;
      if (networkConfigsBuilder_ == null) {
        if (!other.networkConfigs_.isEmpty()) {
          if (networkConfigs_.isEmpty()) {
            networkConfigs_ = other.networkConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureNetworkConfigsIsMutable();
            networkConfigs_.addAll(other.networkConfigs_);
          }
          onChanged();
        }
      } else {
        if (!other.networkConfigs_.isEmpty()) {
          if (networkConfigsBuilder_.isEmpty()) {
            // Empty field builder: drop it and adopt other's list directly, recreating the
            // builder only when the runtime requires field builders to always exist.
            networkConfigsBuilder_.dispose();
            networkConfigsBuilder_ = null;
            networkConfigs_ = other.networkConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            networkConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getNetworkConfigsFieldBuilder()
                    : null;
          } else {
            networkConfigsBuilder_.addAllMessages(other.networkConfigs_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // proto3 message with no required fields: always considered initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse loop: reads tag/value pairs until EOF (tag 0) or an end-group tag,
    // appending each recognized field to this builder in place.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // tag 10 = field 1 ("network_configs"), wire type 2 (length-delimited message).
            case 10:
              {
                com.google.cloud.managedkafka.v1.NetworkConfig m =
                    input.readMessage(
                        com.google.cloud.managedkafka.v1.NetworkConfig.parser(), extensionRegistry);
                if (networkConfigsBuilder_ == null) {
                  ensureNetworkConfigsIsMutable();
                  networkConfigs_.add(m);
                } else {
                  networkConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                // Unrecognized field: preserved in the unknown-field set for round-tripping.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always notify parent builders, even on a partial read that threw.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x1 = networkConfigs_ currently points at a private mutable ArrayList
    // (as opposed to a shared/immutable list adopted from another message).
    private int bitField0_;
    private java.util.List<com.google.cloud.managedkafka.v1.NetworkConfig> networkConfigs_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: clones the current list into a fresh ArrayList before the
    // first mutation after a build/merge.
    private void ensureNetworkConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        networkConfigs_ =
            new java.util.ArrayList<com.google.cloud.managedkafka.v1.NetworkConfig>(
                networkConfigs_);
        bitField0_ |= 0x00000001;
      }
    }
    // Once created, this field builder owns the list and networkConfigs_ is ignored.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.managedkafka.v1.NetworkConfig,
            com.google.cloud.managedkafka.v1.NetworkConfig.Builder,
            com.google.cloud.managedkafka.v1.NetworkConfigOrBuilder>
        networkConfigsBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Virtual Private Cloud (VPC) networks that must be granted direct
     * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
     * networks can be specified.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public java.util.List<com.google.cloud.managedkafka.v1.NetworkConfig> getNetworkConfigsList() {
      if (networkConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(networkConfigs_);
      } else {
        return networkConfigsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Virtual Private Cloud (VPC) networks that must be granted direct
     * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
     * networks can be specified.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public int getNetworkConfigsCount() {
      if (networkConfigsBuilder_ == null) {
        return networkConfigs_.size();
      } else {
        return networkConfigsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Virtual Private Cloud (VPC) networks that must be granted direct
     * access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
     * networks can be specified.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.managedkafka.v1.NetworkConfig getNetworkConfigs(int index) {
      if (networkConfigsBuilder_ == null) {
        return networkConfigs_.get(index);
      } else {
        return networkConfigsBuilder_.getMessage(index);
      }
    }
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setNetworkConfigs(
int index, com.google.cloud.managedkafka.v1.NetworkConfig value) {
if (networkConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNetworkConfigsIsMutable();
networkConfigs_.set(index, value);
onChanged();
} else {
networkConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setNetworkConfigs(
int index, com.google.cloud.managedkafka.v1.NetworkConfig.Builder builderForValue) {
if (networkConfigsBuilder_ == null) {
ensureNetworkConfigsIsMutable();
networkConfigs_.set(index, builderForValue.build());
onChanged();
} else {
networkConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addNetworkConfigs(com.google.cloud.managedkafka.v1.NetworkConfig value) {
if (networkConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNetworkConfigsIsMutable();
networkConfigs_.add(value);
onChanged();
} else {
networkConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addNetworkConfigs(
int index, com.google.cloud.managedkafka.v1.NetworkConfig value) {
if (networkConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNetworkConfigsIsMutable();
networkConfigs_.add(index, value);
onChanged();
} else {
networkConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addNetworkConfigs(
com.google.cloud.managedkafka.v1.NetworkConfig.Builder builderForValue) {
if (networkConfigsBuilder_ == null) {
ensureNetworkConfigsIsMutable();
networkConfigs_.add(builderForValue.build());
onChanged();
} else {
networkConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addNetworkConfigs(
int index, com.google.cloud.managedkafka.v1.NetworkConfig.Builder builderForValue) {
if (networkConfigsBuilder_ == null) {
ensureNetworkConfigsIsMutable();
networkConfigs_.add(index, builderForValue.build());
onChanged();
} else {
networkConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addAllNetworkConfigs(
java.lang.Iterable<? extends com.google.cloud.managedkafka.v1.NetworkConfig> values) {
if (networkConfigsBuilder_ == null) {
ensureNetworkConfigsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, networkConfigs_);
onChanged();
} else {
networkConfigsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearNetworkConfigs() {
if (networkConfigsBuilder_ == null) {
networkConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
networkConfigsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder removeNetworkConfigs(int index) {
if (networkConfigsBuilder_ == null) {
ensureNetworkConfigsIsMutable();
networkConfigs_.remove(index);
onChanged();
} else {
networkConfigsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.NetworkConfig.Builder getNetworkConfigsBuilder(
int index) {
return getNetworkConfigsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.NetworkConfigOrBuilder getNetworkConfigsOrBuilder(
int index) {
if (networkConfigsBuilder_ == null) {
return networkConfigs_.get(index);
} else {
return networkConfigsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public java.util.List<? extends com.google.cloud.managedkafka.v1.NetworkConfigOrBuilder>
getNetworkConfigsOrBuilderList() {
if (networkConfigsBuilder_ != null) {
return networkConfigsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(networkConfigs_);
}
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.NetworkConfig.Builder addNetworkConfigsBuilder() {
return getNetworkConfigsFieldBuilder()
.addBuilder(com.google.cloud.managedkafka.v1.NetworkConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.NetworkConfig.Builder addNetworkConfigsBuilder(
int index) {
return getNetworkConfigsFieldBuilder()
.addBuilder(index, com.google.cloud.managedkafka.v1.NetworkConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* Required. Virtual Private Cloud (VPC) networks that must be granted direct
* access to the Kafka cluster. Minimum of 1 network is required. Maximum 10
* networks can be specified.
* </pre>
*
* <code>
* repeated .google.cloud.managedkafka.v1.NetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public java.util.List<com.google.cloud.managedkafka.v1.NetworkConfig.Builder>
getNetworkConfigsBuilderList() {
return getNetworkConfigsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.NetworkConfig,
com.google.cloud.managedkafka.v1.NetworkConfig.Builder,
com.google.cloud.managedkafka.v1.NetworkConfigOrBuilder>
getNetworkConfigsFieldBuilder() {
if (networkConfigsBuilder_ == null) {
networkConfigsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.NetworkConfig,
com.google.cloud.managedkafka.v1.NetworkConfig.Builder,
com.google.cloud.managedkafka.v1.NetworkConfigOrBuilder>(
networkConfigs_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
networkConfigs_ = null;
}
return networkConfigsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
    // @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.AccessConfig)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.AccessConfig)

  // Singleton default instance shared by all callers; protobuf messages are immutable.
  private static final com.google.cloud.managedkafka.v1.AccessConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.AccessConfig();
  }

  /** Returns the shared immutable default instance. */
  public static com.google.cloud.managedkafka.v1.AccessConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and always attaches the partially-parsed
  // message to the thrown exception so callers can inspect what was read so far.
  private static final com.google.protobuf.Parser<AccessConfig> PARSER =
      new com.google.protobuf.AbstractParser<AccessConfig>() {
        @java.lang.Override
        public AccessConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the shared parser for {@code AccessConfig} messages. */
  public static com.google.protobuf.Parser<AccessConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<AccessConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.managedkafka.v1.AccessConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.graalvm.nativebridge;
import java.io.Closeable;
import java.io.EOFException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.StandardProtocolFamily;
import java.net.UnixDomainSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedByInterruptException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Provides support for managing threads in a process isolated polyglot environment. For each thread
* that is attached to the local process, a corresponding thread is created in a peer process. These
* threads communicate via an unnamed {@code AF_UNIX} socket, enabling inter-process communication
* and method calls between the two processes.
*
* <p>
* This support allows reentrant operations, where the initiating process calls a remote operation
* that is executed by a peer thread in the isolate subprocess. During this operation, the peer
* thread may invoke an upcall to the initiator process, which must be completed before the initial
* call can return.
* </p>
*
* <p>
* This class is thread-safe and ensures that all communication and operations between threads are
* properly synchronized. It also includes mechanisms for forcibly closing connections and handling
* interruptions during thread operations.
* </p>
* <p>
* <b>Usage:</b><br>
* Initiator process
*
* <pre>
* ProcessIsolateThreadSupport host = ProcessIsolateThreadSupport.newBuilder(dispatchSupport).socketNamePrefix("host").buildInitiator();
* Path localAddress = host.getLocalAddress();
* spawnSubprocess("/path/to/isolate/launcher", localAddress.toString());
* host.connect();
*
* ThreadEndPoint endPoint = host.attachThread();
* ByteBuffer response = endPoint.sendAndReceive(request);
* host.detachThread();
* </pre>
*
* Target process
*
* <pre>
* Path hostAddress = Path.of(args[0]);
* ProcessIsolateThreadSupport host = ProcessIsolateThreadSupport.newBuilder(dispatchSupport).socketNamePrefix("target").initiatorAddress(hostAddress).buildTarget();
* host.connectInCurrentThread();
* </pre>
* </p>
*/
final class ProcessIsolateThreadSupport {
    // Sentinel for end-of-stream on channel reads.
    private static final int EOF = -1;
    // CONNECT, ATTACH and CLOSE requests are all padded to this size so the listener can read
    // any of them with a single syscall (see the comment in writeConnectRequest).
    private static final int ATTACH_HEADER_SIZE = 2 * Byte.BYTES + 2 * Integer.BYTES;
    // CALL/RESULT frame header: request tag, response tag, payload length.
    private static final int CALL_HEADER_SIZE = 2 * Byte.BYTES + Integer.BYTES;
    // Initial / maximum size of the per-worker reusable request buffer (1 KiB .. 1 MiB).
    private static final int INITIAL_REQUEST_CACHE_SIZE = 1 << 10;
    private static final int MAX_REQUEST_CACHE_SIZE = 1 << 20;
    // Upper bound on connect retries after ClosedByInterruptException, to avoid a livelock.
    private static final int MAX_INTERRUPTED_ATTACH_RETRIES = 10;

    // Callbacks used to dispatch incoming requests and observe worker thread lifecycle.
    private final DispatchSupport dispatchSupport;
    // Server socket this process listens on (AF_UNIX).
    private final ServerSocketChannel local;
    // Peer's socket address; for the initiator this is learned from the CONNECT request.
    private UnixDomainSocketAddress peer;
    // Thread executing accept(); set in handleConnect, joined in close().
    private Thread listenThread;
    // Worker threads serving remote attached threads.
    private final Set<ThreadChannel> workerThreads = ConcurrentHashMap.newKeySet();
    // Channels created locally via attachThread().
    private final Set<ThreadChannel> attachedThreads = ConcurrentHashMap.newKeySet();
    private final boolean initiator;
    private volatile State state;

    /**
     * Creates the support object. A {@code null} {@code peer} marks this side as the initiator:
     * the peer address is learned later from the CONNECT handshake.
     */
    private ProcessIsolateThreadSupport(DispatchSupport dispatchSupport,
                    ServerSocketChannel local,
                    UnixDomainSocketAddress peer) {
        this.dispatchSupport = Objects.requireNonNull(dispatchSupport);
        this.local = Objects.requireNonNull(local);
        this.peer = peer;
        this.initiator = peer == null;
        this.state = State.NEW;
    }
static boolean isSupported() {
ServerSocketChannel serverSocket;
try {
try {
serverSocket = ServerSocketChannel.open(StandardProtocolFamily.UNIX);
} catch (UnsupportedOperationException unsupported) {
return false;
}
serverSocket.close();
} catch (IOException e) {
/*
* Handles cases where an {@link IOException} may be thrown due to the operating system
* being unable to allocate a new socket, such as when the process has exhausted its
* available file handles. Despite such failures, this method ensures that it still
* reports that AX_UNIX sockets are supported. The intention is to fail later when the
* socket is created with the correct error message.
*/
}
return true;
}
    /**
     * Returns {@code true} when this side created the connection as the initiator, i.e. it was
     * built without a known peer address ({@link Builder#buildInitiator()}).
     */
    boolean isInitiator() {
        return initiator;
    }
    /**
     * Starts a new daemon background thread to establish a connection with the peer
     * {@link ProcessIsolateThreadSupport} instance and to process thread attachment requests. The
     * caller needs to wait until the returned {@link Future} is done before performing any requests
     * to this {@link ProcessIsolateThreadSupport}. The {@link Future} has value {@code true} when
     * the connection was successful.
     */
    Future<Boolean> connectInBackgroundThread() {
        FutureTask<Boolean> result = new FutureTask<>(this::handleConnect);
        Thread thread = new Thread(() -> {
            // Run the handshake first; the FutureTask publishes its outcome to the caller.
            result.run();
            try {
                result.get();
                // Handshake succeeded: keep this thread as the attachment listener.
                accept();
            } catch (ExecutionException | CancellationException e) {
                // connect failed do not listen
            } catch (InterruptedException e) {
                // result.get() cannot block: result.run() has already completed above.
                throw new AssertionError("Should not reach here", e);
            }
        });
        thread.setName(String.format("%s Connection Listen Thread", ProcessIsolateThreadSupport.class.getSimpleName()));
        thread.setDaemon(true);
        thread.start();
        return result;
    }
/**
* Establishes a connection with the peer {@link ProcessIsolateThreadSupport} instance. The
* method uses the current thread to process thread attachment requests and exits when the
* connection to the isolate is closed.
*
* @throws IOException If an I/O error occurs while attempting to connect.
*/
void connectInCurrentThread() throws IOException {
if (handleConnect()) {
accept();
}
}
    /**
     * Performs the connection handshake. The initiator accepts a single connection on its local
     * server socket and learns the peer's address from the CONNECT request; the isolate
     * subprocess connects to the known initiator address, sends its own address, and installs a
     * watchdog that shuts this instance down when the parent process exits.
     *
     * @return {@code true} when the connection was established
     * @throws IllegalStateException when called more than once
     */
    private synchronized boolean handleConnect() throws IOException {
        if (state != State.NEW) {
            throw new IllegalStateException("Already connected, current state: " + state);
        }
        if (this.peer == null) {
            // host initiator
            try (SocketChannel s = local.accept()) {
                String peerAddress = readConnectRequest(s);
                peer = UnixDomainSocketAddress.of(peerAddress);
            } catch (CloseException ce) {
                // Peer requested close before completing the handshake; tear down and propagate.
                handleClose();
                throw ce;
            }
        } else {
            // isolate subprocess
            try (SocketChannel s = SocketChannel.open(peer)) {
                writeConnectRequest(s, getLocalAddress().toString());
            }
            installParentProcessWatchDog();
        }
        // The current thread becomes the attachment listener; close() joins it later.
        listenThread = Thread.currentThread();
        state = State.CONNECTED;
        return true;
    }
private void installParentProcessWatchDog() {
Optional<ProcessHandle> parentOpt = ProcessHandle.current().parent();
if (parentOpt.isPresent()) {
ProcessHandle parent = parentOpt.get();
CompletableFuture<ProcessHandle> onExit = parent.onExit();
onExit.thenRun(() -> {
try {
handleClose();
} catch (IOException ioe) {
/*
* Exiting because the parent process has already terminated. At this point,
* exceptions are no longer relevant, we only need to terminate the child
* process, which has been re-parented to init (systemd).
*/
}
});
}
}
/**
* Closes the connection with the peer {@link ProcessIsolateThreadSupport} instance and
* terminates all worker threads. This method ensures that resources are cleaned up and the
* connection is terminated.
*
* @throws IOException If an I/O error occurs during the closing operation.
* @throws InterruptedException If the operation is interrupted while waiting for threads to
* terminate.
*/
synchronized void close() throws IOException, InterruptedException {
if (state == State.CLOSED) {
return;
}
state = State.CLOSED;
closeAndDeleteLocalSocket();
try (SocketChannel channel = SocketChannel.open(peer)) {
writeCloseRequest(channel);
} catch (IOException e) {
// Closing may cause IOExceptions if it's called simultaneously from both sides.
}
listenThread.join();
cancelWorkerThreads();
for (ThreadChannel threadChannel : attachedThreads) {
try {
threadChannel.close();
} catch (IOException ioe) {
/*
* IOException at this point is not a concern, as the isolate subprocess has already
* exited. Closing sockets merely frees resources.
*/
}
}
}
/**
* Attaches the current thread to the remote {@link ProcessIsolateThreadSupport} instance. This
* operation creates a new {@link Thread} in the remote process, corresponding to the calling
* thread. The threads communicate over an unnamed {@code AF_UNIX} socket for inter-process
* calls.
*
* @throws IOException If an I/O error occurs while attaching the thread.
*/
ThreadChannel attachThread() throws IOException {
checkState();
SocketChannel c = connectPeer();
c.configureBlocking(false);
writeAttachRequest(c, ThreadInfo.current());
ThreadChannel threadChannel = new ThreadChannel(this, c, null);
attachedThreads.add(threadChannel);
return threadChannel;
}
/**
* Connects to the peer process using blocking socket channel.
*
* <p>
* Using a non-blocking {@link SocketChannel} for the initial connect can fail on some Linux
* systems under high load, throwing a {@link java.net.SocketException} with
* {@code errno = EAGAIN (Resource temporarily unavailable)}. This makes non-blocking connect
* unreliable in such environments.
*
* <p>
* Although {@link Selector} and {@link SelectionKey#OP_CONNECT} can be used to wait for the
* completion of a connection, they cannot be used to initiate it. Therefore, this method
* performs the connect in blocking mode.
*
* <p>
* If the connect attempt is interrupted (e.g., due to {@link Thread#interrupt()}), the
* resulting {@link ClosedByInterruptException} is caught and ignored, and the method retries.
* This avoids propagating the exception, which is important for distinguishing between
* cancellation and interruption in {@code IsolateDeathHandler} as both {@code Context.close()}
* and {@code Context.interrupt()} interrupt threads.
*/
private SocketChannel connectPeer() throws IOException {
int interruptCount = 0;
try {
while (true) {
SocketChannel c = SocketChannel.open(StandardProtocolFamily.UNIX);
try {
c.connect(peer);
return c;
} catch (ClosedByInterruptException closed) {
if (interruptCount++ < MAX_INTERRUPTED_ATTACH_RETRIES) {
// Clear the thread interrupt status before retry
Thread.interrupted();
// Retry on interrupt to avoid leaking cancellation semantics into
// IsolateDeathHandler. Closing or interrupting contexts may interrupt this
// thread.
} else {
// Fail with IsolateDeathException on repeated interrupts to avoid livelock.
throw closed;
}
}
}
} finally {
if (interruptCount > 0 && !Thread.currentThread().isInterrupted()) {
Thread.currentThread().interrupt();
}
}
}
/**
* Retrieves the local address of the {@code AF_UNIX} socket used by this instance.
*
* @return The {@link Path} representing the local socket address.
* @throws IOException If an I/O error occurs while retrieving the address.
*/
Path getLocalAddress() throws IOException {
return ((UnixDomainSocketAddress) local.getLocalAddress()).getPath();
}
    /** Creates a new {@link Builder} that uses {@code dispatchSupport} to handle peer requests. */
    static Builder newBuilder(DispatchSupport dispatchSupport) {
        return new Builder(dispatchSupport);
    }
record Result(boolean success, ByteBuffer payload) {
private ResponseType responseType() {
return success ? ResponseType.SUCCESS : ResponseType.FAILURE;
}
}
    /**
     * Callbacks through which {@link ProcessIsolateThreadSupport} hands incoming requests and
     * worker-thread lifecycle events to its embedder.
     */
    interface DispatchSupport {

        /** Called when a worker thread serving a remote attached thread has started. */
        void onWorkerThreadStarted(Thread thread, ThreadChannel channel);

        /** Called when a worker thread serving a remote attached thread is terminating. */
        void onWorkerThreadTerminated(Thread thread, ThreadChannel channel);

        /** Handles a single incoming request and returns its result payload. */
        Result dispatch(ByteBuffer message);
    }
    /**
     * Builder for {@link ProcessIsolateThreadSupport} instances on either side of the isolate
     * boundary.
     */
    static final class Builder {

        private final DispatchSupport dispatchSupport;
        // Default socket path: "<SimpleName>_<pid>" in the current working directory.
        private Path localSocketAddress = Path.of(String.format("%s_%d", ProcessIsolateThreadSupport.class.getSimpleName(), ProcessHandle.current().pid()));
        private Path initiatorAddress;

        private Builder(DispatchSupport dispatchSupport) {
            this.dispatchSupport = Objects.requireNonNull(dispatchSupport, "DispatchSupport must be non-null.");
        }

        /** Overrides the path of the local {@code AF_UNIX} server socket. */
        Builder setLocalAddress(Path socketAddress) {
            this.localSocketAddress = Objects.requireNonNull(socketAddress, "SocketAddress must be non-null.");
            return this;
        }

        /** Sets the initiator's socket path; required for {@link #buildTarget()}. */
        Builder setInitiatorAddress(Path address) {
            this.initiatorAddress = Objects.requireNonNull(address, "Address must be non-null.");
            return this;
        }

        /**
         * Creates a new {@link ProcessIsolateThreadSupport} instance for an initiator process.
         *
         * @throws IOException If an I/O error occurs while setting up the socket.
         */
        ProcessIsolateThreadSupport buildInitiator() throws IOException {
            ServerSocketChannel serverSocket = openUnixDomainServerSocket(localSocketAddress);
            return new ProcessIsolateThreadSupport(dispatchSupport, serverSocket, null);
        }

        /**
         * Creates a new {@link ProcessIsolateThreadSupport} instance for an isolate subprocess. The
         * subprocess connects to the initiator's {@link ProcessIsolateThreadSupport} instance and
         * handles requests accordingly.
         *
         * @return A new instance of {@link ProcessIsolateThreadSupport} for the isolate subprocess.
         * @throws IOException If an I/O error occurs while establishing the connection.
         * @throws IllegalStateException if {@link #setInitiatorAddress(Path) initiatorAddress} was
         *             not set.
         */
        ProcessIsolateThreadSupport buildTarget() throws IOException {
            if (initiatorAddress == null) {
                throw new IllegalStateException("InitiatorAddress must be set.");
            }
            ServerSocketChannel serverSocket = openUnixDomainServerSocket(localSocketAddress);
            return new ProcessIsolateThreadSupport(dispatchSupport, serverSocket, UnixDomainSocketAddress.of(initiatorAddress));
        }

        // Binds an AF_UNIX server socket at socketPath, registers the file for deletion on VM
        // exit, and restricts it to owner read/write where POSIX permissions are available.
        // NOTE(review): the exists-check and the bind are not atomic, and the socket briefly
        // exists with default permissions before setPosixFilePermissions runs — presumably
        // acceptable for this trusted, same-user handshake; confirm if the threat model changes.
        private static ServerSocketChannel openUnixDomainServerSocket(Path socketPath) throws IOException {
            if (Files.exists(socketPath)) {
                throw new IllegalArgumentException(String.format("The socket '%s' already exists.", socketPath));
            }
            socketPath.toFile().deleteOnExit();
            UnixDomainSocketAddress address = UnixDomainSocketAddress.of(socketPath);
            ServerSocketChannel serverSocket = ServerSocketChannel.open(StandardProtocolFamily.UNIX);
            serverSocket.bind(address);
            if (socketPath.getFileSystem().supportedFileAttributeViews().contains("posix")) {
                Files.setPosixFilePermissions(socketPath, EnumSet.of(PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE));
            }
            return serverSocket;
        }
    }
static final class ThreadChannel implements Closeable {
private final ProcessIsolateThreadSupport owner;
private final SocketChannel channel;
private final Selector selector;
private final SelectionKey readKey;
private final Thread workerThread;
private ThreadChannel(ProcessIsolateThreadSupport owner, SocketChannel channel, Thread workerThread) throws IOException {
this.owner = Objects.requireNonNull(owner, "Owner must be non-null");
this.channel = Objects.requireNonNull(channel, "Channel must be non-null");
/*
* We need to use non-blocking channel to support cancelling. The blocking channel is
* automatically closed whenever the thread calling read is interrupted.
*/
this.channel.configureBlocking(false);
this.selector = Selector.open();
this.readKey = channel.register(selector, SelectionKey.OP_READ);
this.workerThread = workerThread;
}
/**
* Detaches the current thread from the remote {@link ProcessIsolateThreadSupport} instance.
* This results in the termination of the corresponding thread in the remote process and the
* closure of the communication socket.
*
* @throws IOException If an I/O error occurs during the detachment process.
*/
@Override
public void close() throws IOException {
selector.close();
channel.configureBlocking(true);
channel.close();
}
/**
* Sends a request and awaits a response from the remote {@link ProcessIsolateThreadSupport}
* instance. While awaiting the completion of the request, nested requests from the remote
* process may be processed.
*
* @param data The data to be sent as part of the request.
* @return The response data received from the remote process.
* @throws IOException If an I/O error occurs during the request-response operation.
*/
Result sendAndReceive(ByteBuffer data) throws IOException {
owner.checkState();
ByteBuffer header = ByteBuffer.allocate(CALL_HEADER_SIZE);
header.put(RequestType.CALL.tag);
header.put(ResponseType.UNDEFINED.binaryForm);
header.putInt(data.limit() - data.position());
header.flip();
writeFully(channel, new ByteBuffer[]{header, data});
while (true) { // TERMINATION ARGUMENT: type
if (selector.select() == 0 || !selector.selectedKeys().remove(readKey)) {
continue;
}
header.clear();
readFully(channel, header);
header.flip();
RequestType type = RequestType.fromTag(header.get());
ResponseType responseType = ResponseType.fromBinaryForm(header.get());
int contentLength = header.getInt();
ByteBuffer target;
if (contentLength <= data.capacity()) {
data.clear();
data.limit(contentLength);
target = data;
} else {
target = ByteBuffer.allocate(contentLength);
}
readFully(channel, target);
switch (type) {
case CALL -> {
Result result = owner.dispatchSupport.dispatch(target);
target = result.payload;
header.clear();
header.put(RequestType.RESULT.tag);
header.put(result.responseType().binaryForm);
header.putInt(target.limit() - target.position());
header.flip();
writeFully(channel, new ByteBuffer[]{header, target});
}
case RESULT -> {
assert responseType != ResponseType.UNDEFINED;
boolean success = responseType == ResponseType.SUCCESS;
return new Result(success, target);
}
default -> throw throwIllegalRequest(type, RequestType.CALL, RequestType.RESULT);
}
}
}
/**
 * Serves incoming CALL requests from the peer until the owner leaves the
 * CONNECTED state, replying to each with a RESULT message.
 *
 * @throws IOException if reading or writing the channel fails, or a
 *         non-CALL request arrives.
 */
private void dispatch() throws IOException {
    ByteBuffer header = ByteBuffer.allocate(CALL_HEADER_SIZE);
    // Reusable payload buffer, grown on demand (and retained, up to a cap)
    // to avoid re-allocating on every request.
    ByteBuffer payloadCache = ByteBuffer.allocate(INITIAL_REQUEST_CACHE_SIZE);
    while (owner.state == State.CONNECTED) {
        // Only proceed when this channel's read key was actually selected;
        // spurious wakeups loop back around.
        if (selector.select() == 0 || !selector.selectedKeys().remove(readKey)) {
            continue;
        }
        readFully(channel, header);
        header.flip();
        RequestType type = RequestType.fromTag(header.get());
        if (type != RequestType.CALL) {
            throw throwIllegalRequest(type, RequestType.CALL);
        }
        // Ignore error flag, used only in response
        header.position(header.position() + 1);
        int len = header.getInt();
        ByteBuffer request;
        if (len <= payloadCache.capacity()) {
            request = payloadCache;
            request.clear();
            request.limit(len);
        } else {
            request = ByteBuffer.allocate(len);
            // Keep the larger buffer for future requests, but only below the
            // cap so one oversized request does not pin memory forever.
            if (len < MAX_REQUEST_CACHE_SIZE) {
                payloadCache = request;
            }
        }
        readFully(channel, request);
        Result result = owner.dispatchSupport.dispatch(request);
        ByteBuffer response = result.payload;
        // Response header mirrors the request: [tag][responseType][length].
        header.clear();
        header.put(RequestType.RESULT.tag);
        header.put(result.responseType().binaryForm);
        header.putInt(response.limit() - response.position());
        header.flip();
        writeFully(channel, new ByteBuffer[]{header, response});
        header.clear();
    }
}
}
/**
 * Sends the initial CONNECT request carrying {@code line} as a UTF-8 payload.
 */
private static void writeConnectRequest(SocketChannel c, String line) throws IOException {
    byte[] encoded = line.getBytes(StandardCharsets.UTF_8);
    ByteBuffer requestHeader = ByteBuffer.allocate(ATTACH_HEADER_SIZE);
    requestHeader.put(RequestType.CONNECT.tag).putInt(encoded.length);
    /*
     * Pad the message up to ATTACH_HEADER_SIZE. For performance reasons, CONNECT, ATTACH,
     * and CLOSE requests must have the same size. It is preferable to send a larger request
     * for CONNECT and CLOSE, which are called only once, rather than performing two read
     * syscalls when handling ATTACH.
     */
    requestHeader.position(requestHeader.limit());
    requestHeader.flip();
    writeFully(c, new ByteBuffer[]{requestHeader, ByteBuffer.wrap(encoded)});
}
/**
 * Reads the initial CONNECT request and returns its UTF-8 payload.
 *
 * @throws CloseException if the peer sent CLOSE instead of CONNECT.
 * @throws IOException on channel errors or an unexpected request type.
 */
private static String readConnectRequest(SocketChannel c) throws IOException {
    ByteBuffer header = ByteBuffer.allocate(ATTACH_HEADER_SIZE);
    readFully(c, header);
    header.flip();
    RequestType type = RequestType.fromTag(header.get());
    switch (type) {
        case CONNECT -> {
            // Payload length follows the tag; read exactly that many bytes.
            int len = header.getInt();
            byte[] bytes = new byte[len];
            ByteBuffer buffer = ByteBuffer.wrap(bytes);
            readFully(c, buffer);
            return new String(bytes, 0, buffer.position(), StandardCharsets.UTF_8);
        }
        case CLOSE -> throw new CloseException();
        default -> throw throwIllegalRequest(type, RequestType.CONNECT, RequestType.CLOSE);
    }
}
/**
 * Always throws {@link IllegalStateException} describing an unexpected request
 * type. Declared to return {@code RuntimeException} so call sites can write
 * {@code throw throwIllegalRequest(...)} and keep the compiler's flow analysis
 * happy; this method never actually returns.
 */
private static RuntimeException throwIllegalRequest(RequestType type, RequestType expected, RequestType... expectedRest) {
    StringBuilder expectedNames = new StringBuilder(expected.name());
    for (RequestType candidate : expectedRest) {
        expectedNames.append(", ").append(candidate.name());
    }
    throw new IllegalStateException(String.format("Illegal request %s, expected %s", type, expectedNames));
}
/**
 * Accept loop: for each peer connection, reads the ATTACH request and starts
 * a dedicated worker thread mirroring the peer thread's name, priority and
 * daemon flag. Runs until a CLOSE request arrives or the support leaves the
 * CONNECTED state.
 */
private void accept() {
    while (state == State.CONNECTED) {
        SocketChannel peerThreadChannel = null;
        try {
            peerThreadChannel = local.accept();
            ThreadInfo info = readAttachRequest(peerThreadChannel);
            /*
             * By default, the stack size for new threads is 512KB in native-image, compared to
             * 2MB on HotSpot. We choose to use the larger size (2MB) for consistency and
             * safety. Ideally, the host thread's actual stack size should be communicated as
             * part of the attach request, but the Java API does not expose this information. To
             * retrieve it, we would need to use a native library that calls
             * pthread_get_stacksize_np(pthread_self()).
             */
            Thread workerThread = new Thread(null, new DispatchRunnable(peerThreadChannel), info.name(), 2097152);
            workerThread.setPriority(info.priority);
            workerThread.setDaemon(info.daemon);
            workerThread.start();
        } catch (CloseException ce) {
            // Orderly shutdown requested by the peer.
            try {
                handleClose();
                if (peerThreadChannel != null) {
                    peerThreadChannel.close();
                }
            } catch (IOException e) {
                // Ignore close exception on exit.
            }
        } catch (IOException ioe) {
            /*
             * Connection failed, close the peerThreadChannel to notify client opening the
             * connection.
             */
            if (peerThreadChannel != null) {
                try {
                    peerThreadChannel.close();
                } catch (IOException e) {
                    // Ignore close exception on exit.
                }
            }
        }
    }
}
/**
 * Shuts this support down: marks it CLOSED, removes the listening socket and
 * waits for all worker threads to terminate.
 *
 * @throws InterruptedIOException if interrupted while joining workers; the
 *         thread's interrupt status is preserved.
 */
private void handleClose() throws IOException {
    // Flip the state first so concurrent callers of checkState() fail fast
    // instead of racing with the teardown below.
    state = State.CLOSED;
    closeAndDeleteLocalSocket();
    try {
        cancelWorkerThreads();
    } catch (InterruptedException ie) {
        // Fix: re-assert the interrupt flag (the original swallowed it) so
        // callers further up the stack can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new InterruptedIOException();
    }
}
/**
 * Closes the listening domain socket and best-effort deletes its filesystem
 * entry.
 */
private void closeAndDeleteLocalSocket() throws IOException {
    // Capture the address before close() in case it is unavailable afterwards.
    Path localAddress = getLocalAddress();
    local.close();
    try {
        Files.deleteIfExists(localAddress);
    } catch (IOException ioe) {
        /*
         * Failed to eagerly delete the socket file. This is not a critical issue since the file
         * will be deleted automatically on JVM exit.
         */
    }
}
/**
 * Stops every worker thread and waits for it to finish: the selector wakeup
 * unblocks a worker parked in {@code selector.select()}, the interrupt makes
 * its dispatch loop observe cancellation, and {@code join()} waits for exit.
 *
 * @throws InterruptedException if the caller is interrupted while joining.
 */
private void cancelWorkerThreads() throws InterruptedException {
    for (ThreadChannel worker : workerThreads) {
        worker.selector.wakeup();
        worker.workerThread.interrupt();
        worker.workerThread.join();
    }
}
/**
 * Sends a CLOSE request. The message carries no payload — just the tag,
 * padded to the fixed header size.
 */
private static void writeCloseRequest(SocketChannel channel) throws IOException {
    ByteBuffer closeHeader = ByteBuffer.allocate(ATTACH_HEADER_SIZE);
    closeHeader.put(RequestType.CLOSE.tag);
    /*
     * Pad the message up to ATTACH_HEADER_SIZE. For performance reasons, CONNECT, ATTACH,
     * and CLOSE requests must have the same size. It is preferable to send a larger request
     * for CONNECT and CLOSE, which are called only once, rather than performing two read
     * syscalls when handling ATTACH.
     */
    closeHeader.position(closeHeader.limit());
    closeHeader.flip();
    writeFully(channel, closeHeader);
}
/**
 * Sends an ATTACH request describing the calling thread. Wire layout:
 * [tag][priority:int][daemon:byte][nameLength:int] followed by the UTF-8 name.
 */
private static void writeAttachRequest(SocketChannel channel, ThreadInfo info) throws IOException {
    byte[] encodedName = info.name().getBytes(StandardCharsets.UTF_8);
    ByteBuffer attachHeader = ByteBuffer.allocate(ATTACH_HEADER_SIZE);
    attachHeader.put(RequestType.ATTACH.tag)
            .putInt(info.priority())
            .put(info.daemon() ? (byte) 1 : (byte) 0)
            .putInt(encodedName.length);
    attachHeader.flip();
    writeFully(channel, new ByteBuffer[]{attachHeader, ByteBuffer.wrap(encodedName)});
}
/**
 * Reads an ATTACH request and decodes the peer thread's metadata. Wire
 * layout: [tag][priority:int][daemon:byte][nameLength:int] followed by the
 * UTF-8 encoded thread name.
 *
 * @throws CloseException if the peer sent CLOSE instead of ATTACH.
 * @throws IOException on channel errors or an unexpected request type.
 */
private static ThreadInfo readAttachRequest(SocketChannel channel) throws IOException {
    ByteBuffer header = ByteBuffer.allocate(ATTACH_HEADER_SIZE);
    readFully(channel, header);
    header.flip();
    RequestType type = RequestType.fromTag(header.get());
    switch (type) {
        case ATTACH -> {
            int priority = header.getInt();
            boolean daemon = header.get() != 0;
            int len = header.getInt();
            byte[] nameBytes = new byte[len];
            ByteBuffer buffer = ByteBuffer.wrap(nameBytes);
            readFully(channel, buffer);
            String name = new String(nameBytes, 0, buffer.position(), StandardCharsets.UTF_8);
            return new ThreadInfo(name, priority, daemon);
        }
        case CLOSE -> throw new CloseException();
        default -> throw throwIllegalRequest(type, RequestType.ATTACH, RequestType.CLOSE);
    }
}
/**
 * Verifies this support is CONNECTED, throwing {@link IllegalStateException}
 * otherwise. Reads the state field once so the check and the error message
 * report the same value.
 */
private void checkState() {
    State observed = state;
    if (observed == State.CONNECTED) {
        return;
    }
    throw new IllegalStateException("Must be connected, current state " + observed);
}
/**
 * Writes the whole remaining content of {@code buffer}; a single
 * {@code write()} call may be partial, so loop until drained.
 */
private static void writeFully(SocketChannel channel, ByteBuffer buffer) throws IOException {
    do {
        // NOTE(review): SocketChannel.write() is documented to return >= 0,
        // so the EOF comparison appears to be a defensive no-op kept for
        // symmetry with readFully — confirm before removing.
        if (channel.write(buffer) == EOF) {
            throw new EOFException();
        }
    } while (buffer.hasRemaining());
}
/**
 * Gathering write of all buffers. Only the last buffer's remaining count is
 * checked because gathering writes drain buffers in order: the last buffer
 * is empty only once all earlier ones are.
 */
private static void writeFully(SocketChannel channel, ByteBuffer[] buffers) throws IOException {
    ByteBuffer lastBuffer = buffers[buffers.length - 1];
    do {
        // NOTE(review): as with the single-buffer variant, write() returns
        // >= 0 per its contract, so the EOF branch looks defensive — confirm.
        if (channel.write(buffers) == EOF) {
            throw new EOFException();
        }
    } while (lastBuffer.hasRemaining());
}
/**
 * Reads until {@code buffer} is full. A single {@code read()} may deliver
 * fewer bytes than requested; a return of {@code EOF} means the peer closed
 * the connection mid-message.
 */
private static void readFully(SocketChannel channel, ByteBuffer buffer) throws IOException {
    do {
        int transferred = channel.read(buffer);
        if (transferred == EOF) {
            throw new EOFException();
        }
    } while (buffer.hasRemaining());
}
/**
 * Immutable snapshot of a thread's identity attributes, exchanged in ATTACH
 * requests so the worker thread can mirror the peer thread.
 */
private record ThreadInfo(String name, int priority, boolean daemon) {
    /** Captures the name, priority and daemon flag of the calling thread. */
    static ThreadInfo current() {
        Thread current = Thread.currentThread();
        return new ThreadInfo(current.getName(), current.getPriority(), current.isDaemon());
    }
}
/** Lifecycle of this support: created, actively serving, or shut down. */
private enum State {
    NEW,
    CONNECTED,
    CLOSED
}
/**
 * Wire-level request kinds; {@code tag} is the single-byte encoding sent
 * over the channel.
 */
private enum RequestType {
    CONNECT(0),
    ATTACH(1),
    CLOSE(2),
    CALL(3),
    RESULT(4);

    /** Lookup table indexed by wire tag; tags are dense starting at 0. */
    private static final RequestType[] TYPES;
    static {
        RequestType[] values = values();
        RequestType[] types = new RequestType[values.length];
        for (RequestType rt : values) {
            types[rt.tag] = rt;
        }
        TYPES = types;
    }

    /** Single-byte wire representation of this request type. */
    final byte tag;

    RequestType(int tag) {
        this.tag = (byte) tag;
    }

    /**
     * Maps a wire tag back to its {@link RequestType}.
     *
     * @throws IllegalArgumentException if {@code tag} is not a known tag
     *         (e.g. a corrupt or truncated header). Fix: previously an
     *         out-of-range tag surfaced as an opaque
     *         {@code ArrayIndexOutOfBoundsException}.
     */
    static RequestType fromTag(int tag) {
        if (tag < 0 || tag >= TYPES.length) {
            throw new IllegalArgumentException("Unsupported RequestType tag " + tag);
        }
        return TYPES[tag];
    }
}
/**
 * Outcome flag carried in RESULT headers; {@code binaryForm} is the
 * single-byte wire encoding. {@code UNDEFINED} is the placeholder used in
 * outgoing CALL headers.
 */
private enum ResponseType {
    SUCCESS(1),
    FAILURE(0),
    UNDEFINED(-1);

    /** Single-byte wire encoding of this response type. */
    final byte binaryForm;

    ResponseType(int binaryForm) {
        this.binaryForm = (byte) binaryForm;
    }

    /** Decodes a wire byte back into a {@code ResponseType}. */
    static ResponseType fromBinaryForm(byte raw) {
        return Arrays.stream(values())
                .filter(candidate -> candidate.binaryForm == raw)
                .findFirst()
                .orElseThrow(() -> new IllegalArgumentException("Unsupported ResponseType binaryForm " + raw));
    }
}
/**
 * Internal sentinel thrown when the peer sends a CLOSE request; it is never
 * serialized, hence the suppressed "serial" warning.
 */
@SuppressWarnings("serial")
private static final class CloseException extends IOException {
}
/**
 * Worker-thread entry point for one attached peer thread: registers its
 * {@link ThreadChannel}, notifies the dispatch support of the thread's
 * lifecycle, and serves requests until the channel closes.
 */
final class DispatchRunnable implements Runnable {

    // Socket dedicated to the peer thread this worker mirrors.
    private final SocketChannel peerThreadChannel;

    private DispatchRunnable(SocketChannel peerThreadChannel) {
        this.peerThreadChannel = peerThreadChannel;
    }

    @Override
    public void run() {
        Thread currentThread = Thread.currentThread();
        try (ThreadChannel threadChannel = new ThreadChannel(ProcessIsolateThreadSupport.this, peerThreadChannel, currentThread)) {
            workerThreads.add(threadChannel);
            try {
                dispatchSupport.onWorkerThreadStarted(currentThread, threadChannel);
                try {
                    threadChannel.dispatch();
                } finally {
                    // Always pair the "started" notification, even on failure.
                    dispatchSupport.onWorkerThreadTerminated(currentThread, threadChannel);
                }
            } finally {
                workerThreads.remove(threadChannel);
            }
        } catch (IOException ioe) {
            // Closes peerThreadChannel to notify client
        }
    }
}
}
|
apache/kylin | 36,114 | src/kylin-it/src/test/java/org/apache/kylin/newten/NFilePruningTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.newten;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.util.Shell;
import org.apache.kylin.common.util.DateFormat;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.common.util.RandomUtil;
import org.apache.kylin.common.util.TempMetadataBuilder;
import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
import org.apache.kylin.guava30.shaded.common.collect.Lists;
import org.apache.kylin.job.util.JobContextUtil;
import org.apache.kylin.junit.TimeZoneTestRunner;
import org.apache.kylin.metadata.cube.model.IndexPlan;
import org.apache.kylin.metadata.cube.model.LayoutEntity;
import org.apache.kylin.metadata.cube.model.NDataSegment;
import org.apache.kylin.metadata.cube.model.NDataSegmentManager;
import org.apache.kylin.metadata.cube.model.NDataflow;
import org.apache.kylin.metadata.cube.model.NDataflowManager;
import org.apache.kylin.metadata.model.NDataModelManager;
import org.apache.kylin.metadata.model.SegmentRange;
import org.apache.kylin.metadata.model.Segments;
import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
import org.apache.kylin.metadata.project.NProjectManager;
import org.apache.kylin.query.relnode.ContextUtil;
import org.apache.kylin.util.ExecAndComp;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparderEnv;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.execution.KylinFileSourceScanExec;
import org.apache.spark.sql.execution.SparkPlan;
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper;
import org.apache.spark.sql.internal.StaticSQLConf;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.sparkproject.guava.collect.Sets;
import lombok.val;
import scala.runtime.AbstractFunction1;
@RunWith(TimeZoneTestRunner.class)
public class NFilePruningTest extends NLocalWithSparkSessionTest implements AdaptiveSparkPlanHelper {
private final String base = "select count(*) FROM TEST_ORDER LEFT JOIN TEST_KYLIN_FACT ON TEST_KYLIN_FACT.ORDER_ID = TEST_ORDER.ORDER_ID ";
/** Boots a fresh local SparkSession shared by every test in this class. */
@BeforeClass
public static void initSpark() {
    // Snappy's default native library name does not resolve on macOS.
    if (Shell.MAC)
        overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib");//for snappy
    // Replace any session left running by a previous suite.
    if (ss != null && !ss.sparkContext().isStopped()) {
        ss.stop();
    }
    sparkConf = new SparkConf().setAppName(RandomUtil.randomUUIDStr()).setMaster("local[4]");
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
    sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
    sparkConf.set("spark.sql.shuffle.partitions", "1");
    sparkConf.set("spark.memory.fraction", "0.1");
    // opt memory
    sparkConf.set("spark.shuffle.detectCorrupt", "false");
    // For sinai_poc/query03, enable implicit cross join conversion
    sparkConf.set("spark.sql.crossJoin.enabled", "true");
    sparkConf.set("spark.sql.adaptive.enabled", "true");
    sparkConf.set(StaticSQLConf.WAREHOUSE_PATH().key(),
            TempMetadataBuilder.TEMP_TEST_METADATA + "/spark-warehouse");
    ss = SparkSession.builder().config(sparkConf).getOrCreate();
    SparderEnv.setSparkSession(ss);
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // Load the file_pruning test metadata, then reset the job context so each
    // test starts from a clean scheduler state.
    this.createTestMetadata("src/test/resources/ut_meta/file_pruning");
    JobContextUtil.cleanUp();
    JobContextUtil.getJobContext(getTestConfig());
}
@Override
public String[] getOverlay() {
    // Metadata overlay directory backing this suite's fixtures.
    String[] overlay = { "src/test/resources/ut_meta/file_pruning" };
    return overlay;
}
@Override
@After
public void tearDown() throws Exception {
    // Tear down the job context before the metadata it depends on.
    JobContextUtil.cleanUp();
    cleanupTestMetadata();
}
/**
 * Builds a segment over a time range with no matching data; with empty-segment
 * skipping on (the default), the query should scan zero files.
 */
@Test
public void testNonExistTimeRangeExcludeEmpty() throws Exception {
    val dataflowId = "8c670664-8d05-466a-802f-83c023b56c77";
    val rangeStart = SegmentRange.dateToLong("2023-01-01 00:00:00");
    val rangeEnd = SegmentRange.dateToLong("2025-01-01 00:00:00");
    NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), getProject());
    val allLayouts = dataflowManager.getDataflow(dataflowId).getIndexPlan().getAllLayouts();
    indexDataConstructor.buildIndex(dataflowId,
            new SegmentRange.TimePartitionedSegmentRange(rangeStart, rangeEnd),
            Sets.newLinkedHashSet(allLayouts), true);
    assertResultsAndScanFiles(dataflowId, base, 0, false, Lists.newArrayList());
}
/**
 * Same empty time range as the test above, but with empty-segment skipping
 * disabled: the empty segment must still be scanned, so one file is expected.
 */
@Test
public void testNonExistTimeRangeIncludeEmpty() throws Exception {
    overwriteSystemProp("kylin.query.skip-empty-segments", "false");
    val start = SegmentRange.dateToLong("2023-01-01 00:00:00");
    val end = SegmentRange.dateToLong("2025-01-01 00:00:00");
    val dfName = "8c670664-8d05-466a-802f-83c023b56c77";
    NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
    NDataflow df = dsMgr.getDataflow(dfName);
    val layouts = df.getIndexPlan().getAllLayouts();
    indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end),
            Sets.newLinkedHashSet(layouts), true);
    assertResultsAndScanFiles(dfName, base, 1, false, Lists.newArrayList());
}
/**
 * The built range starts at 2013, which overlaps data, so one segment/file is
 * scanned even with empty-segment skipping enabled.
 */
@Test
public void testExistTimeRangeExcludeEmpty() throws Exception {
    val start = SegmentRange.dateToLong("2013-01-01 00:00:00");
    val end = SegmentRange.dateToLong("2025-01-01 00:00:00");
    val dfName = "8c670664-8d05-466a-802f-83c023b56c77";
    NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
    NDataflow df = dsMgr.getDataflow(dfName);
    val layouts = df.getIndexPlan().getAllLayouts();
    indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end),
            Sets.newLinkedHashSet(layouts), true);
    assertResultsAndScanFiles(dfName, base, 1, false, Lists.newArrayList());
}
/**
 * Verifies segment pruning driven by TIMESTAMP predicates on TEST_TIME_ENC:
 * each assertion checks both the number of scanned files and the segment
 * ranges that survived pruning, then all queries are re-run against Spark for
 * result equivalence.
 */
@Test
public void testSegPruningWithTimeStamp() throws Exception {
    // build three segs
    // [2009-01-01 00:00:00, 2011-01-01 00:00:00)
    // [2011-01-01 00:00:00, 2013-01-01 00:00:00)
    // [2013-01-01 00:00:00, 2015-01-01 00:00:00)
    val dfId = "8c670664-8d05-466a-802f-83c023b56c77";
    buildMultiSegs(dfId, 10001);
    populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
    String and_pruning0 = base
            + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' and TEST_TIME_ENC < TIMESTAMP '2013-01-01 00:00:00'";
    String and_pruning1 = base
            + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' and TEST_TIME_ENC = TIMESTAMP '2016-01-01 00:00:00'";
    String or_pruning0 = base
            + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' or TEST_TIME_ENC = TIMESTAMP '2016-01-01 00:00:00'";
    String or_pruning1 = base
            + "where TEST_TIME_ENC < TIMESTAMP '2009-01-01 00:00:00' or TEST_TIME_ENC > TIMESTAMP '2015-01-01 00:00:00'";
    String pruning0 = base + "where TEST_TIME_ENC < TIMESTAMP '2009-01-01 00:00:00'";
    String pruning1 = base + "where TEST_TIME_ENC <= TIMESTAMP '2009-01-01 00:00:00'";
    String pruning2 = base + "where TEST_TIME_ENC >= TIMESTAMP '2015-01-01 00:00:00'";
    String not_equal0 = base + "where TEST_TIME_ENC <> TIMESTAMP '2012-01-01 00:00:00'";
    String not0 = base
            + "where not (TEST_TIME_ENC < TIMESTAMP '2011-01-01 00:00:00' or TEST_TIME_ENC >= TIMESTAMP '2013-01-01 00:00:00')";
    String in_pruning0 = base
            + "where TEST_TIME_ENC in (TIMESTAMP '2009-01-01 00:00:00',TIMESTAMP '2008-01-01 00:00:00',TIMESTAMP '2016-01-01 00:00:00')";
    String in_pruning1 = base
            + "where TEST_TIME_ENC in (TIMESTAMP '2008-01-01 00:00:00',TIMESTAMP '2016-01-01 00:00:00')";
    // The expected surviving segment ranges, rebuilt before each assertion.
    val expectedRanges = Lists.<Pair<String, String>> newArrayList();
    val segmentRange1 = Pair.newPair("2009-01-01 00:00:00", "2011-01-01 00:00:00");
    val segmentRange2 = Pair.newPair("2011-01-01 00:00:00", "2013-01-01 00:00:00");
    val segmentRange3 = Pair.newPair("2013-01-01 00:00:00", "2015-01-01 00:00:00");
    expectedRanges.add(segmentRange1);
    expectedRanges.add(segmentRange2);
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(dfId, base, 3, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange2);
    assertResultsAndScanFiles(dfId, and_pruning0, 1, false, expectedRanges);
    expectedRanges.clear();
    assertResultsAndScanFiles(dfId, and_pruning1, 0, true, expectedRanges);
    expectedRanges.add(segmentRange2);
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(dfId, or_pruning0, 2, false, expectedRanges);
    expectedRanges.clear();
    assertResultsAndScanFiles(dfId, or_pruning1, 0, true, expectedRanges);
    assertResultsAndScanFiles(dfId, pruning0, 0, true, expectedRanges);
    expectedRanges.add(segmentRange1);
    assertResultsAndScanFiles(dfId, pruning1, 1, false, expectedRanges);
    expectedRanges.clear();
    assertResultsAndScanFiles(dfId, pruning2, 0, true, expectedRanges);
    // pruning with "not equal" is not supported
    expectedRanges.add(segmentRange1);
    expectedRanges.add(segmentRange2);
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(dfId, not_equal0, 3, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange2);
    assertResultsAndScanFiles(dfId, not0, 1, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange1);
    assertResultsAndScanFiles(dfId, in_pruning0, 1, false, expectedRanges);
    assertResultsAndScanFiles(dfId, in_pruning1, 0, true, expectedRanges);
    // Cross-check query results against Spark on the non-empty cases.
    List<Pair<String, String>> query = new ArrayList<>();
    query.add(Pair.newPair("base", base));
    query.add(Pair.newPair("and_pruning0", and_pruning0));
    query.add(Pair.newPair("or_pruning0", or_pruning0));
    query.add(Pair.newPair("pruning1", pruning1));
    query.add(Pair.newPair("not_equal0", not_equal0));
    query.add(Pair.newPair("not0", not0));
    query.add(Pair.newPair("in_pruning0", in_pruning0));
    ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "default");
}
/** Exercises shard pruning with tiny (100-row) shards so pruning kicks in. */
@Test
public void testShardPruning() throws Exception {
    overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "100");
    val dfId = "8c670664-8d05-466a-802f-83c023b56c77";
    buildMultiSegs(dfId);
    populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
    basicPruningScenario(dfId);
    pruningWithVariousTypesScenario(dfId);
}
/**
 * Shard pruning on a string column holding non-ASCII (Chinese) values: the
 * equality filter should hit a single shard, the inequality should not prune.
 */
@Test
public void testPruningWithChineseCharacter() throws Exception {
    // One row per shard to make the pruning effect visible per-file.
    overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "1");
    val dfId = "9cde9d25-9334-4b92-b229-a00f49453757";
    fullBuild(dfId);
    populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
    val chinese0 = "select count(*) from TEST_MEASURE where name1 = '中国'";
    val chinese1 = "select count(*) from TEST_MEASURE where name1 <> '中国'";
    assertResultsAndScanFiles(dfId, chinese0, 1, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, chinese1, 4, false, Lists.newArrayList());
    List<Pair<String, String>> query = new ArrayList<>();
    query.add(Pair.newPair("", chinese0));
    query.add(Pair.newPair("", chinese1));
    ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
}
/**
 * Shard pruning across column types (boolean, decimal, short, string, long,
 * date, timestamp): equality predicates should prune to few files, inequality
 * predicates should not.
 */
private void pruningWithVariousTypesScenario(String dfId) throws Exception {
    // int type is tested #basicPruningScenario
    // xx0 means can pruning, while xx1 can not.
    val bool0 = base + "where IS_EFFECTUAL = true";
    val bool1 = base + "where IS_EFFECTUAL <> true";
    val decimal0 = base + "where PRICE = 290.48";
    val decimal1 = base + "where PRICE > 290.48";
    val short0 = base + "where SLR_SEGMENT_CD = 16";
    val short1 = base + "where SLR_SEGMENT_CD > 16";
    val string0 = base + "where LSTG_FORMAT_NAME = 'Auction'";
    val string1 = base + "where LSTG_FORMAT_NAME <> 'Auction'";
    val long0 = base + "where TEST_ORDER.ORDER_ID = 2662";
    val long1 = base + "where TEST_ORDER.ORDER_ID <> 2662";
    val date0 = base + "where TEST_DATE_ENC = DATE '2011-07-10'";
    val date1 = base + "where TEST_DATE_ENC <> DATE '2011-07-10'";
    val ts0 = base + "where TEST_TIME_ENC = TIMESTAMP '2013-06-18 07:07:10'";
    val ts1 = base + "where TEST_TIME_ENC > TIMESTAMP '2013-01-01 00:00:00' "
            + "and TEST_TIME_ENC < TIMESTAMP '2015-01-01 00:00:00' "
            + "and TEST_TIME_ENC <> TIMESTAMP '2013-06-18 07:07:10'";
    assertResultsAndScanFiles(dfId, bool0, 3, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, bool1, 11, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, decimal0, 3, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, decimal1, 52, false, Lists.newArrayList());
    // calcite will treat short as int. So pruning will not work.
    assertResultsAndScanFiles(dfId, short0, 3, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, short1, 25, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, string0, 3, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, string1, 12, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, long0, 3, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, long1, 28, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, date0, 3, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, date1, 19, false, Lists.newArrayList());
    // segment pruning first, then shard pruning
    // so the scanned files is 1 not 3(each segment per shard)
    assertResultsAndScanFiles(dfId, ts0, 1, false, Lists.newArrayList());
    assertResultsAndScanFiles(dfId, ts1, 11, false, Lists.newArrayList());
    // Cross-check all query results against Spark.
    List<Pair<String, String>> query = new ArrayList<>();
    query.add(Pair.newPair("", bool0));
    query.add(Pair.newPair("", bool1));
    query.add(Pair.newPair("", decimal0));
    query.add(Pair.newPair("", decimal1));
    query.add(Pair.newPair("", short0));
    query.add(Pair.newPair("", short1));
    query.add(Pair.newPair("", string0));
    query.add(Pair.newPair("", string1));
    query.add(Pair.newPair("", long0));
    query.add(Pair.newPair("", long1));
    query.add(Pair.newPair("", date0));
    query.add(Pair.newPair("", date1));
    // see #11598
    query.add(Pair.newPair("", ts0));
    query.add(Pair.newPair("", ts1));
    ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
}
/**
 * Segment pruning driven by DATE predicates (both typed DATE literals and
 * plain strings), including nested queries, IN/NOT IN, BETWEEN and combined
 * filters; finally re-runs the same queries with heterogeneous-segment
 * pruning disabled, where no segment may be pruned.
 */
@Test
@Ignore("TODO: remove or adapt")
public void testSegmentPruningDate() throws Exception {
    val modelId = "8c670664-8d05-466a-802f-83c023b56c80";
    buildMultiSegs(modelId, 10005);
    populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
    val sql = "select test_date_enc, count(*) FROM TEST_ORDER LEFT JOIN TEST_KYLIN_FACT ON TEST_KYLIN_FACT.ORDER_ID = TEST_ORDER.ORDER_ID ";
    val and_pruning0 = sql
            + "where test_date_enc > (Date '2011-01-01') and test_date_enc < (Date '2012-01-01') group by test_date_enc";
    val and_pruning1 = sql
            + "where test_date_enc > '2011-01-01' and test_date_enc < '2012-01-01' group by test_date_enc";
    val or_pruning0 = sql
            + "where test_date_enc > '2012-01-01' or test_date_enc = '2008-01-01' group by test_date_enc";
    val or_pruning1 = sql
            + "where test_date_enc < '2011-01-01' or test_date_enc > '2013-01-01' group by test_date_enc";
    val pruning0 = sql + "where test_date_enc > '2020-01-01' group by test_date_enc";
    val pruning1 = sql + "where test_date_enc < '2008-01-01' group by test_date_enc";
    val pruning2 = sql + "where test_date_enc = '2012-01-01' group by test_date_enc";
    val not_pruning0 = sql
            + "where not (test_date_enc < '2011-01-01' or test_date_enc >= '2013-01-01') group by test_date_enc";
    val not_pruning1 = sql + "where not test_date_enc = '2012-01-01' group by test_date_enc";
    val nested_query0 = "with test_order as (select * from \"default\".test_order where test_date_enc > '2012-01-01' and test_date_enc < '2013-01-01')"
            + sql + "group by test_date_enc";
    val nested_query1 = "select * from (select * from (" + sql
            + "where test_date_enc > '2011-01-01' group by test_date_enc) where test_date_enc < '2012-01-01')";
    // date functions are not supported yet
    val date_function_query0 = "select * from (select year(test_date_enc) as test_date_enc_year from (" + sql
            + "where test_date_enc > '2011-01-01' and test_date_enc < '2013-01-01' group by test_date_enc)) where test_date_enc_year = '2014'";
    val between_query0 = sql + "where test_date_enc between '2011-01-01' and '2012-12-31' group by test_date_enc";
    val in_query0 = sql
            + "where test_date_enc in (Date '2011-06-01', Date '2012-06-01', Date '2012-12-31') group by test_date_enc";
    val in_query1 = sql
            + "where test_date_enc in ('2011-06-01', '2012-06-01', '2012-12-31') group by test_date_enc";
    val not_in_query0 = sql
            + "where test_date_enc not in (Date '2011-06-01', Date '2012-06-01', Date '2013-06-01') group by test_date_enc";
    val not_in_query1 = sql
            + "where test_date_enc not in ('2011-06-01', '2012-06-01', '2013-06-01') group by test_date_enc";
    val complex_query0 = sql
            + "where test_date_enc in ('2011-01-01', '2012-01-01', '2013-01-01', '2014-01-01') and test_date_enc > '2013-01-01' group by test_date_enc";
    val complex_query1 = sql
            + "where test_date_enc in (Date '2011-01-01', Date '2012-01-01', Date '2013-01-01', Date '2014-01-01') and test_date_enc > Date '2013-01-01' group by test_date_enc";
    // The expected surviving segment ranges, rebuilt before each assertion.
    val expectedRanges = Lists.<Pair<String, String>> newArrayList();
    val segmentRange1 = Pair.newPair("2009-01-01", "2011-01-01");
    val segmentRange2 = Pair.newPair("2011-01-01", "2013-01-01");
    val segmentRange3 = Pair.newPair("2013-01-01", "2015-01-01");
    expectedRanges.add(segmentRange2);
    assertResultsAndScanFiles(modelId, and_pruning0, 1, false, expectedRanges);
    assertResultsAndScanFiles(modelId, and_pruning1, 1, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange2);
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(modelId, or_pruning0, 2, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange1);
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(modelId, or_pruning1, 2, false, expectedRanges);
    expectedRanges.clear();
    assertResultsAndScanFiles(modelId, pruning0, 0, true, expectedRanges);
    assertResultsAndScanFiles(modelId, pruning1, 0, true, expectedRanges);
    expectedRanges.add(segmentRange2);
    assertResultsAndScanFiles(modelId, pruning2, 1, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange2);
    assertResultsAndScanFiles(modelId, not_pruning0, 1, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange1);
    expectedRanges.add(segmentRange2);
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(modelId, not_pruning1, 3, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange2);
    assertResultsAndScanFiles(modelId, nested_query0, 1, false, expectedRanges);
    assertResultsAndScanFiles(modelId, nested_query1, 1, false, expectedRanges);
    assertResultsAndScanFiles(modelId, between_query0, 1, false, expectedRanges);
    assertResultsAndScanFiles(modelId, in_query0, 1, false, expectedRanges);
    assertResultsAndScanFiles(modelId, in_query1, 1, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange1);
    expectedRanges.add(segmentRange2);
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(modelId, not_in_query0, 3, false, expectedRanges);
    assertResultsAndScanFiles(modelId, not_in_query1, 3, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange2);
    assertResultsAndScanFiles(modelId, date_function_query0, 1, false, expectedRanges);
    expectedRanges.clear();
    expectedRanges.add(segmentRange3);
    assertResultsAndScanFiles(modelId, complex_query0, 1, false, expectedRanges);
    assertResultsAndScanFiles(modelId, complex_query1, 1, false, expectedRanges);
    List<Pair<String, String>> query = Lists.newArrayList(//
            Pair.newPair("", and_pruning0), Pair.newPair("", and_pruning1), //
            Pair.newPair("", or_pruning0), Pair.newPair("", or_pruning1), //
            Pair.newPair("", pruning2), //
            Pair.newPair("", not_pruning0), Pair.newPair("", not_pruning1), //
            Pair.newPair("", nested_query0), Pair.newPair("", nested_query1), //
            Pair.newPair("", in_query0), Pair.newPair("", in_query1), //
            Pair.newPair("", date_function_query0), //
            Pair.newPair("", complex_query0), Pair.newPair("", complex_query1));
    ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
    // kylin.query.heterogeneous-segment-enabled is turned off
    val projectManager = NProjectManager.getInstance(getTestConfig());
    projectManager.updateProject(getProject(), copyForWrite -> {
        copyForWrite.getOverrideKylinProps().put("kylin.query.heterogeneous-segment-enabled", "false");
    });
    expectedRanges.clear();
    val sqls = Lists.<String> newArrayList();
    Collections.addAll(sqls, and_pruning0, and_pruning1, or_pruning0, or_pruning1, pruning0, pruning1, pruning2,
            not_pruning0, not_pruning1, nested_query0, nested_query1, between_query0, in_query0, in_query1,
            date_function_query0, complex_query0, complex_query1);
    assertResultAndScanFilesForPruningDate(modelId, sqls, expectedRanges);
}
/**
 * Re-checks the pruning-date queries with heterogeneous-segment pruning
 * disabled. Expected scanned-file counts are positionally aligned with the
 * {@code sqls} list built in {@code testSegmentPruningDate}.
 */
private void assertResultAndScanFilesForPruningDate(String modelId, List<String> sqls,
        List<Pair<String, String>> expectedRanges) throws Exception {
    int[] expectedScanFiles = { 1, 1, 2, 2, 0, 0, 1, 1, 3, 1, 1, 1, 1, 3, 1, 1, 1 };
    for (int i = 0; i < expectedScanFiles.length; i++) {
        assertResultsAndScanFiles(modelId, sqls.get(i), expectedScanFiles[i], false, expectedRanges);
    }
}
@Test
public void testDimRangePruningAfterMerge() throws Exception {
String modelId = "3f152495-44de-406c-9abf-b11d4132aaed";
overwriteSystemProp("kylin.engine.persist-flattable-enabled", "true");
buildMultiSegAndMerge("3f152495-44de-406c-9abf-b11d4132aaed");
val ss = SparderEnv.getSparkSession();
populateSSWithCSVData(getTestConfig(), getProject(), ss);
val lessThanEquality = base + "where TEST_KYLIN_FACT.ORDER_ID <= 10";
val castIn = base + "where test_date_enc in ('2014-12-20', '2014-12-21')";
Instant startDate = DateFormat.stringToDate("2014-12-20").toInstant();
String[] dates = new String[ss.sqlContext().conf().optimizerInSetConversionThreshold() + 1];
for (int i = 0; i < dates.length; i++) {
dates[i] = "'" + DateFormat.formatToDateStr(startDate.plus(i, ChronoUnit.DAYS).toEpochMilli()) + "'";
}
val largeCastIn = base + "where test_date_enc in (" + String.join(",", dates) + ")";
val in = base + "where TEST_KYLIN_FACT.ORDER_ID in (4998, 4999)";
val lessThan = base + "where TEST_KYLIN_FACT.ORDER_ID < 10";
val and = base + "where PRICE < -99 AND TEST_KYLIN_FACT.ORDER_ID = 1";
val or = base + "where TEST_KYLIN_FACT.ORDER_ID = 1 or TEST_KYLIN_FACT.ORDER_ID = 10";
val notSupported0 = base + "where SELLER_ID <> 10000233";
val notSupported1 = base + "where SELLER_ID > 10000233";
val expectedRanges = Lists.<Pair<String, String>> newArrayList();
val segmentRange1 = Pair.newPair("2009-01-01 00:00:00", "2011-01-01 00:00:00");
val segmentRange2 = Pair.newPair("2011-01-01 00:00:00", "2015-01-01 00:00:00");
expectedRanges.add(segmentRange1);
expectedRanges.add(segmentRange2);
assertResultsAndScanFiles(modelId, largeCastIn, 1, false, expectedRanges);
assertResultsAndScanFiles(modelId, lessThanEquality, 2, false, expectedRanges);
assertResultsAndScanFiles(modelId, castIn, 1, false, expectedRanges);
assertResultsAndScanFiles(modelId, in, 1, false, expectedRanges);
assertResultsAndScanFiles(modelId, lessThan, 1, false, expectedRanges);
assertResultsAndScanFiles(modelId, and, 1, false, expectedRanges);
assertResultsAndScanFiles(modelId, or, 2, false, expectedRanges);
assertResultsAndScanFiles(modelId, notSupported0, 2, false, expectedRanges);
assertResultsAndScanFiles(modelId, notSupported1, 2, false, expectedRanges);
List<Pair<String, String>> query = new ArrayList<>();
query.add(Pair.newPair("", lessThanEquality));
query.add(Pair.newPair("", in));
query.add(Pair.newPair("", lessThan));
query.add(Pair.newPair("", and));
query.add(Pair.newPair("", or));
query.add(Pair.newPair("", notSupported0));
query.add(Pair.newPair("", notSupported1));
ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
}
@Test
public void testMergeDimRange() throws Exception {
String dataflowId = "3f152495-44de-406c-9abf-b11d4132aaed";
String modelId = dataflowId;
overwriteSystemProp("kylin.engine.persist-flattable-enabled", "false");
buildMultiSegAndMerge(dataflowId);
populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), getProject());
NDataflow dataflow = dataflowManager.getDataflow(dataflowId);
Segments<NDataSegment> segments = dataflow.getSegments();
Assert.assertEquals(2, segments.size());
NDataSegment mergedSegment = segments.get(1);
Assert.assertEquals(14, mergedSegment.getDimensionRangeInfoMap().size());
val priceTest = base + "where PRICE <= -99.7900";
val expectedRanges = Lists.<Pair<String, String>> newArrayList();
val segmentRange1 = Pair.newPair("2009-01-01 00:00:00", "2011-01-01 00:00:00");
val segmentRange2 = Pair.newPair("2011-01-01 00:00:00", "2015-01-01 00:00:00");
expectedRanges.add(segmentRange1);
expectedRanges.add(segmentRange2);
assertResultsAndScanFiles(modelId, priceTest, 1, false, expectedRanges);
List<Pair<String, String>> query = new ArrayList<>();
query.add(Pair.newPair("", priceTest));
ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
}
    @Test
    public void testMergeDimRangeFalse() throws Exception {
        // Verifies that merging segments when one source segment has had its
        // dimension-range metadata cleared yields a merged segment with an EMPTY
        // dimension range map (the merge cannot fabricate missing ranges).
        String dataflowId = "3f152495-44de-406c-9abf-b11d4132aaed";
        overwriteSystemProp("kylin.engine.persist-flattable-enabled", "false");
        buildMultiSegs(dataflowId);
        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), getProject());
        NDataflow dataflow = dataflowManager.getDataflow(dataflowId);
        Segments<NDataSegment> segments = dataflow.getSegments();
        Assert.assertEquals(3, segments.size());
        // Clear the dimension ranges of the middle segment inside a transaction so
        // the metadata update is persisted before the merge below reads it.
        EnhancedUnitOfWork.doInTransactionWithCheckAndRetry(() -> {
            NDataSegment nDataSegment = segments.get(1);
            NDataSegmentManager.getInstance(getTestConfig(), getProject()).update(nDataSegment.getUuid(), copy -> {
                copy.getDimensionRangeInfoMap().clear();
            });
            return null;
        }, getProject());
        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
        NDataflow df = dsMgr.getDataflow(dataflowId);
        IndexPlan indexPlan = df.getIndexPlan();
        List<LayoutEntity> layouts = indexPlan.getAllLayouts();
        mergeSegments(dataflowId, Sets.newLinkedHashSet(layouts));
        // After the merge: two segments remain, and the merged one (index 1) has no
        // dimension range info because one of its sources had none.
        Segments<NDataSegment> segments2 = dataflowManager.getDataflow(dataflowId).getSegments();
        Assert.assertEquals(2, segments2.size());
        NDataSegment segment = segments2.get(1);
        Assert.assertTrue(segment.getDimensionRangeInfoMap().isEmpty());
    }
private void basicPruningScenario(String dfId) throws Exception {
// shard pruning supports: Equality/In/IsNull/And/Or
// other expression(gt/lt/like/cast/substr, etc.) will select all files.
val equality = base + "where SELLER_ID = 10000233";
val in = base + "where SELLER_ID in (10000233,10000234,10000235)";
val isNull = base + "where SELLER_ID is NULL";
val and = base + "where SELLER_ID in (10000233,10000234,10000235) and SELLER_ID = 10000233 ";
val or = base + "where SELLER_ID = 10000233 or SELLER_ID = 1 ";
val notSupported0 = base + "where SELLER_ID <> 10000233";
val notSupported1 = base + "where SELLER_ID > 10000233";
assertResultsAndScanFiles(dfId, equality, 3, false, Lists.newArrayList());
assertResultsAndScanFiles(dfId, in, 9, false, Lists.newArrayList());
assertResultsAndScanFiles(dfId, isNull, 3, false, Lists.newArrayList());
assertResultsAndScanFiles(dfId, and, 3, false, Lists.newArrayList());
assertResultsAndScanFiles(dfId, or, 4, false, Lists.newArrayList());
assertResultsAndScanFiles(dfId, notSupported0, 17, false, Lists.newArrayList());
assertResultsAndScanFiles(dfId, notSupported1, 17, false, Lists.newArrayList());
List<Pair<String, String>> query = new ArrayList<>();
query.add(Pair.newPair("", equality));
query.add(Pair.newPair("", in));
query.add(Pair.newPair("", isNull));
query.add(Pair.newPair("", and));
query.add(Pair.newPair("", or));
query.add(Pair.newPair("", notSupported0));
query.add(Pair.newPair("", notSupported1));
ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
}
    @Override
    public String getProject() {
        // All tests in this suite run against the "file_pruning" sample project.
        return "file_pruning";
    }
    /**
     * Runs the SQL against the model and asserts how many files the physical plan scanned,
     * plus (optionally) which segments survived pruning.
     *
     * @param modelId        model whose segments are checked against expectedRanges
     * @param sql            query to execute
     * @param numScanFiles   expected "numFiles" metric of the file source scan
     * @param emptyLayout    when true, expect the query to be fully skipped (no layout matched)
     * @param expectedRanges expected [start, end) ranges of the pruned segments; empty list skips the check
     * @return the actual number of scanned files (or numScanFiles when emptyLayout)
     */
    private long assertResultsAndScanFiles(String modelId, String sql, long numScanFiles, boolean emptyLayout,
            List<Pair<String, String>> expectedRanges) {
        val df = ExecAndComp.queryModelWithoutCompute(getProject(), sql);
        // The query is expected to produce exactly one OLAP context; inspect its storage side.
        val context = ContextUtil.listContexts().get(0);
        if (emptyLayout) {
            // Fully pruned: data skipped and no layout chosen (-1 sentinel layout id).
            Assert.assertTrue(context.getStorageContext().isDataSkipped());
            Assert.assertEquals(-1L, context.getStorageContext().getBatchCandidate().getLayoutId());
            return numScanFiles;
        }
        // Force execution so the physical plan's metrics are populated.
        df.collect();
        val actualNum = findFileSourceScanExec(df.queryExecution().executedPlan()).metrics().get("numFiles").get()
                .value();
        Assert.assertEquals(numScanFiles, actualNum);
        val segmentIds = context.getStorageContext().getBatchCandidate().getPrunedSegments();
        assertPrunedSegmentRange(modelId, segmentIds, expectedRanges);
        return actualNum;
    }
    /**
     * Locates the (single expected) KylinFileSourceScanExec node in a Spark physical plan.
     * Uses an anonymous AbstractFunction1 for Scala interop; throws if no such node exists
     * (Option.get on an empty result).
     */
    private KylinFileSourceScanExec findFileSourceScanExec(SparkPlan plan) {
        return (KylinFileSourceScanExec) find(plan, new AbstractFunction1<SparkPlan, Object>() {
            @Override
            public Object apply(SparkPlan v1) {
                return v1 instanceof KylinFileSourceScanExec;
            }
        }).get();
    }
    /**
     * Asserts that the pruned segments' time ranges match the expected [start, end) pairs,
     * in order. A null/empty expectedRanges list disables the check entirely.
     */
    private void assertPrunedSegmentRange(String dfId, List<NDataSegment> prunedSegments,
            List<Pair<String, String>> expectedRanges) {
        val model = NDataModelManager.getInstance(getTestConfig(), getProject()).getDataModelDesc(dfId);
        // Format segment timestamps with the model's partition column date format so
        // they compare cleanly with the human-readable expected strings.
        val partitionColDateFormat = model.getPartitionDesc().getPartitionDateFormat();
        if (CollectionUtils.isEmpty(expectedRanges)) {
            return;
        }
        Assert.assertEquals(expectedRanges.size(), prunedSegments.size());
        for (int i = 0; i < prunedSegments.size(); i++) {
            val segment = prunedSegments.get(i);
            val start = DateFormat.formatToDateStr(segment.getTSRange().getStart(), partitionColDateFormat);
            val end = DateFormat.formatToDateStr(segment.getTSRange().getEnd(), partitionColDateFormat);
            val expectedRange = expectedRanges.get(i);
            Assert.assertEquals(expectedRange.getFirst(), start);
            Assert.assertEquals(expectedRange.getSecond(), end);
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.util;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.DynamicRelationship;
import org.apache.nifi.components.ConfigurableComponent;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.connectable.ConnectableType;
import org.apache.nifi.connectable.Connection;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.controller.ProcessorNode;
import org.apache.nifi.controller.flow.FlowManager;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.flow.ComponentType;
import org.apache.nifi.flow.ScheduledState;
import org.apache.nifi.flow.VersionedConnection;
import org.apache.nifi.flow.VersionedControllerService;
import org.apache.nifi.flow.VersionedPort;
import org.apache.nifi.flow.VersionedProcessor;
import org.apache.nifi.flow.VersionedPropertyDescriptor;
import org.apache.nifi.flow.VersionedRemoteGroupPort;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.registry.flow.diff.DifferenceType;
import org.apache.nifi.registry.flow.diff.FlowDifference;
import org.apache.nifi.registry.flow.diff.StandardFlowDifference;
import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedConnection;
import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedControllerService;
import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedProcessor;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestFlowDifferenceFilters {
@Test
public void testFilterAddedRemotePortsWithRemoteInputPortAsComponentB() {
VersionedRemoteGroupPort remoteGroupPort = new VersionedRemoteGroupPort();
remoteGroupPort.setComponentType(ComponentType.REMOTE_INPUT_PORT);
StandardFlowDifference flowDifference = new StandardFlowDifference(
DifferenceType.COMPONENT_ADDED, null, remoteGroupPort, null, null, "");
// predicate should return false because we don't want to include changes for adding a remote input port
assertFalse(FlowDifferenceFilters.FILTER_ADDED_REMOVED_REMOTE_PORTS.test(flowDifference));
}
@Test
public void testFilterAddedRemotePortsWithRemoteInputPortAsComponentA() {
VersionedRemoteGroupPort remoteGroupPort = new VersionedRemoteGroupPort();
remoteGroupPort.setComponentType(ComponentType.REMOTE_INPUT_PORT);
StandardFlowDifference flowDifference = new StandardFlowDifference(
DifferenceType.COMPONENT_ADDED, remoteGroupPort, null, null, null, "");
// predicate should return false because we don't want to include changes for adding a remote input port
assertFalse(FlowDifferenceFilters.FILTER_ADDED_REMOVED_REMOTE_PORTS.test(flowDifference));
}
    @Test
    public void testFilterAddedRemotePortsWithRemoteOutputPort() {
        VersionedRemoteGroupPort remoteGroupPort = new VersionedRemoteGroupPort();
        remoteGroupPort.setComponentType(ComponentType.REMOTE_OUTPUT_PORT);
        StandardFlowDifference flowDifference = new StandardFlowDifference(
                DifferenceType.COMPONENT_ADDED, null, remoteGroupPort, null, null, "");
        // predicate should return false because we don't want to include changes for adding a remote output port
        assertFalse(FlowDifferenceFilters.FILTER_ADDED_REMOVED_REMOTE_PORTS.test(flowDifference));
    }
@Test
public void testFilterAddedRemotePortsWithNonRemoteInputPort() {
VersionedProcessor versionedProcessor = new VersionedProcessor();
versionedProcessor.setComponentType(ComponentType.PROCESSOR);
StandardFlowDifference flowDifference = new StandardFlowDifference(
DifferenceType.COMPONENT_ADDED, null, versionedProcessor, null, null, "");
// predicate should return true because we do want to include changes for adding a non-port
assertTrue(FlowDifferenceFilters.FILTER_ADDED_REMOVED_REMOTE_PORTS.test(flowDifference));
}
@Test
public void testFilterPublicPortNameChangeWhenNotNameChange() {
final VersionedPort portA = new VersionedPort();
final VersionedPort portB = new VersionedPort();
final StandardFlowDifference flowDifference = new StandardFlowDifference(
DifferenceType.VERSIONED_FLOW_COORDINATES_CHANGED,
portA, portB,
"http://localhost:18080", "http://localhost:17080",
"");
assertTrue(FlowDifferenceFilters.FILTER_PUBLIC_PORT_NAME_CHANGES.test(flowDifference));
}
@Test
public void testFilterPublicPortNameChangeWhenNotAllowRemoteAccess() {
final VersionedPort portA = new VersionedPort();
final VersionedPort portB = new VersionedPort();
final StandardFlowDifference flowDifference = new StandardFlowDifference(
DifferenceType.NAME_CHANGED,
portA, portB,
"Port A", "Port B",
"");
assertTrue(FlowDifferenceFilters.FILTER_PUBLIC_PORT_NAME_CHANGES.test(flowDifference));
}
@Test
public void testFilterPublicPortNameChangeWhenAllowRemoteAccess() {
final VersionedPort portA = new VersionedPort();
portA.setAllowRemoteAccess(Boolean.TRUE);
final VersionedPort portB = new VersionedPort();
portB.setAllowRemoteAccess(Boolean.FALSE);
final StandardFlowDifference flowDifference = new StandardFlowDifference(
DifferenceType.NAME_CHANGED,
portA, portB,
"Port A", "Port B",
"");
assertFalse(FlowDifferenceFilters.FILTER_PUBLIC_PORT_NAME_CHANGES.test(flowDifference));
}
@Test
public void testFilterControllerServiceStatusChangeWhenNewStateIntroduced() {
final VersionedControllerService controllerServiceA = new VersionedControllerService();
final VersionedControllerService controllerServiceB = new VersionedControllerService();
controllerServiceA.setScheduledState(null);
controllerServiceB.setScheduledState(ScheduledState.DISABLED);
final StandardFlowDifference flowDifference = new StandardFlowDifference(
DifferenceType.SCHEDULED_STATE_CHANGED,
controllerServiceA, controllerServiceB,
controllerServiceA.getScheduledState(), controllerServiceB.getScheduledState(),
"");
assertTrue(FlowDifferenceFilters.isScheduledStateNew(flowDifference));
}
    @Test
    public void testIsLocalScheduleStateChangeWithNullComponentADoesNotNPE() {
        // Regression test: isLocalScheduleStateChange must tolerate a null component A.
        // Simulate DEEP comparison producing a scheduled state change for a newly added component (no local A)
        final FlowDifference flowDifference = Mockito.mock(FlowDifference.class);
        Mockito.when(flowDifference.getDifferenceType()).thenReturn(DifferenceType.SCHEDULED_STATE_CHANGED);
        Mockito.when(flowDifference.getComponentA()).thenReturn(null);
        Mockito.when(flowDifference.getValueA()).thenReturn("RUNNING");
        Mockito.when(flowDifference.getValueB()).thenReturn("RUNNING");
        // Should not throw and should return false since no local component
        assertFalse(FlowDifferenceFilters.isLocalScheduleStateChange(flowDifference));
    }
    @Test
    public void testIsStaticPropertyRemovedFromDefinitionWhenPropertyDropped() {
        // When a property named in a PROPERTY_REMOVED difference no longer exists in the
        // component's definition, the removal is attributed to the definition change.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
        final ConfigurableComponent configurableComponent = Mockito.mock(ConfigurableComponent.class);
        final String propertyName = "Obsolete Property";
        final String instanceId = "processor-instance";
        Mockito.when(flowManager.getProcessorNode(instanceId)).thenReturn(processorNode);
        Mockito.when(processorNode.getComponent()).thenReturn(configurableComponent);
        // The component only declares "Retained Property"; the removed one is gone.
        Mockito.when(configurableComponent.getPropertyDescriptors()).thenReturn(List.of(new PropertyDescriptor.Builder().name("Retained Property").build()));
        Mockito.when(configurableComponent.getPropertyDescriptor(propertyName)).thenReturn(null);
        final InstantiatedVersionedProcessor localProcessor = new InstantiatedVersionedProcessor(instanceId, "group-id");
        final FlowDifference difference = new StandardFlowDifference(
                DifferenceType.PROPERTY_REMOVED,
                localProcessor,
                localProcessor,
                propertyName,
                "old",
                null,
                "Property removed in component definition");
        assertTrue(FlowDifferenceFilters.isStaticPropertyRemoved(difference, flowManager));
    }
    @Test
    public void testIsStaticPropertyRemovedFromDefinitionWhenDescriptorStillExists() {
        // Counterpart to the "property dropped" case: the descriptor is still declared
        // by the component, so the removal is NOT attributed to a definition change.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
        final ConfigurableComponent configurableComponent = Mockito.mock(ConfigurableComponent.class);
        final String propertyName = "Still Supported";
        final String instanceId = "processor-instance";
        Mockito.when(flowManager.getProcessorNode(instanceId)).thenReturn(processorNode);
        Mockito.when(processorNode.getComponent()).thenReturn(configurableComponent);
        Mockito.when(configurableComponent.getPropertyDescriptors()).thenReturn(List.of(new PropertyDescriptor.Builder().name(propertyName).build()));
        final InstantiatedVersionedProcessor localProcessor = new InstantiatedVersionedProcessor(instanceId, "group-id");
        final FlowDifference difference = new StandardFlowDifference(
                DifferenceType.PROPERTY_REMOVED,
                localProcessor,
                localProcessor,
                propertyName,
                "old",
                null,
                "Property still defined");
        assertFalse(FlowDifferenceFilters.isStaticPropertyRemoved(difference, flowManager));
    }
    @Test
    public void testIsStaticPropertyRemovedFromDefinitionWhenDynamicSupported() {
        // A component that supports dynamic properties (DynamicAnnotationProcessor is a
        // test fixture defined elsewhere in this file) may legitimately lose any property,
        // so the removal must not be treated as a definition-driven removal.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
        final ConfigurableComponent configurableComponent = new DynamicAnnotationProcessor();
        final String propertyName = "Dynamic Property";
        final String instanceId = "processor-instance";
        Mockito.when(flowManager.getProcessorNode(instanceId)).thenReturn(processorNode);
        Mockito.when(processorNode.getComponent()).thenReturn(configurableComponent);
        final InstantiatedVersionedProcessor localProcessor = new InstantiatedVersionedProcessor(instanceId, "group-id");
        final FlowDifference difference = new StandardFlowDifference(
                DifferenceType.PROPERTY_REMOVED,
                localProcessor,
                localProcessor,
                propertyName,
                "old",
                null,
                "Dynamic property removed");
        assertFalse(FlowDifferenceFilters.isStaticPropertyRemoved(difference, flowManager));
    }
    @Test
    public void testControllerServiceCreationPairedWithPropertyAdditionIsEnvironmentalChange() {
        // When a PROPERTY_ADDED difference references a controller service whose
        // COMPONENT_ADDED difference appears in the same diff set, both differences are
        // treated as one environmental change — but only when evaluated with a context
        // built over the full difference list.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
        final ControllerServiceNode controllerServiceNode = Mockito.mock(ControllerServiceNode.class);
        final String processorId = "processor-instance";
        final String groupId = "group-id";
        final String propertyName = "ABC";
        final String controllerServiceId = "controller-service-id";
        Mockito.when(flowManager.getProcessorNode(processorId)).thenReturn(processorNode);
        Mockito.when(flowManager.getControllerServiceNode(controllerServiceId)).thenReturn(controllerServiceNode);
        // The property is a controller-service reference.
        final PropertyDescriptor propertyDescriptor = new PropertyDescriptor.Builder()
                .name(propertyName)
                .identifiesControllerService(ControllerService.class)
                .build();
        Mockito.when(processorNode.getPropertyDescriptor(propertyName)).thenReturn(propertyDescriptor);
        final InstantiatedVersionedProcessor instantiatedProcessor = new InstantiatedVersionedProcessor(processorId, groupId);
        instantiatedProcessor.setComponentType(ComponentType.PROCESSOR);
        final FlowDifference propertyDifference = new StandardFlowDifference(
                DifferenceType.PROPERTY_ADDED,
                instantiatedProcessor,
                instantiatedProcessor,
                propertyName,
                null,
                controllerServiceId,
                "Controller service reference added");
        final InstantiatedVersionedControllerService instantiatedControllerService = new InstantiatedVersionedControllerService(controllerServiceId, groupId);
        instantiatedControllerService.setComponentType(ComponentType.CONTROLLER_SERVICE);
        final FlowDifference controllerServiceDifference = new StandardFlowDifference(
                DifferenceType.COMPONENT_ADDED,
                null,
                instantiatedControllerService,
                null,
                null,
                "Controller service created");
        final FlowDifferenceFilters.EnvironmentalChangeContext context = FlowDifferenceFilters.buildEnvironmentalChangeContext(
                List.of(propertyDifference, controllerServiceDifference), flowManager);
        // With an empty context the pairing cannot be detected; with the built context it can.
        assertFalse(FlowDifferenceFilters.isControllerServiceCreatedForNewProperty(propertyDifference, FlowDifferenceFilters.EnvironmentalChangeContext.empty()));
        assertTrue(FlowDifferenceFilters.isControllerServiceCreatedForNewProperty(propertyDifference, context));
        assertTrue(FlowDifferenceFilters.isControllerServiceCreatedForNewProperty(controllerServiceDifference, context));
        assertFalse(FlowDifferenceFilters.isEnvironmentalChange(propertyDifference, null, flowManager));
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(propertyDifference, null, flowManager, context));
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(controllerServiceDifference, null, flowManager, context));
    }
    @Test
    public void testControllerServiceCreationEnvironmentalChangeWithoutComponentNode() {
        // Same pairing as the test above, but the component holding the new property has
        // no live node in the FlowManager — the pairing must then be derived from the
        // versioned property descriptors alone.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final String groupId = "group-id";
        final String propertyName = "Request Rate Manager";
        final String controllerServiceInstanceId = "service-instance-id";
        final String controllerServiceVersionedId = "service-versioned-id";
        final InstantiatedVersionedControllerService controllerServiceWithNewProperty = new InstantiatedVersionedControllerService("component-instance", groupId);
        controllerServiceWithNewProperty.setComponentType(ComponentType.CONTROLLER_SERVICE);
        controllerServiceWithNewProperty.setIdentifier(controllerServiceVersionedId);
        controllerServiceWithNewProperty.setProperties(Map.of(propertyName, controllerServiceVersionedId));
        // Mark the property as a controller-service reference via its versioned descriptor.
        final VersionedPropertyDescriptor versionedPropertyDescriptor = new VersionedPropertyDescriptor();
        versionedPropertyDescriptor.setName(propertyName);
        versionedPropertyDescriptor.setDisplayName(propertyName);
        versionedPropertyDescriptor.setDynamic(false);
        versionedPropertyDescriptor.setIdentifiesControllerService(true);
        controllerServiceWithNewProperty.setPropertyDescriptors(Map.of(propertyName, versionedPropertyDescriptor));
        final FlowDifference propertyDifference = new StandardFlowDifference(
                DifferenceType.PROPERTY_ADDED,
                null,
                controllerServiceWithNewProperty,
                propertyName,
                null,
                controllerServiceVersionedId,
                "Controller service reference added");
        final InstantiatedVersionedControllerService instantiatedControllerService = new InstantiatedVersionedControllerService(controllerServiceInstanceId, groupId);
        instantiatedControllerService.setComponentType(ComponentType.CONTROLLER_SERVICE);
        instantiatedControllerService.setIdentifier(controllerServiceVersionedId);
        final FlowDifference controllerServiceDifference = new StandardFlowDifference(
                DifferenceType.COMPONENT_ADDED,
                null,
                instantiatedControllerService,
                null,
                null,
                "Controller service created");
        final FlowDifferenceFilters.EnvironmentalChangeContext context = FlowDifferenceFilters.buildEnvironmentalChangeContext(
                List.of(propertyDifference, controllerServiceDifference), flowManager);
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(propertyDifference, null, flowManager, context));
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(controllerServiceDifference, null, flowManager, context));
    }
    @Test
    public void testPropertyRenameWithParameterizationObservedAsEnvironmentalChange() {
        // A property rename ("Access Key" -> "Access Key ID") where both sides keep the
        // same parameter reference produces a PARAMETERIZATION_REMOVED + PARAMETERIZED
        // pair. With the context built over both differences, the pair is recognized as
        // an environmental change; without it, it is not.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
        final String processorInstanceId = "processor-instance";
        Mockito.when(flowManager.getProcessorNode(processorInstanceId)).thenReturn(processorNode);
        // The live node only knows the NEW property name.
        final PropertyDescriptor renamedDescriptor = new PropertyDescriptor.Builder()
                .name("Access Key ID")
                .build();
        Mockito.when(processorNode.getPropertyDescriptor("Access Key ID")).thenReturn(renamedDescriptor);
        Mockito.when(processorNode.getPropertyDescriptor("Access Key")).thenReturn(null);
        final VersionedProcessor versionedProcessorA = new VersionedProcessor();
        versionedProcessorA.setComponentType(ComponentType.PROCESSOR);
        versionedProcessorA.setIdentifier("versioned-id");
        versionedProcessorA.setProperties(Map.of("Access Key", "#{AWS Access Key ID}"));
        final InstantiatedVersionedProcessor instantiatedProcessorB = new InstantiatedVersionedProcessor(processorInstanceId, "group-id");
        instantiatedProcessorB.setComponentType(ComponentType.PROCESSOR);
        instantiatedProcessorB.setIdentifier("versioned-id");
        instantiatedProcessorB.setProperties(Map.of("Access Key ID", "#{AWS Access Key ID}"));
        final FlowDifference parameterizationRemoved = new StandardFlowDifference(
                DifferenceType.PROPERTY_PARAMETERIZATION_REMOVED,
                versionedProcessorA,
                instantiatedProcessorB,
                "Access Key ID",
                null,
                null,
                "Property parameterization removed for Access Key");
        final FlowDifference parameterized = new StandardFlowDifference(
                DifferenceType.PROPERTY_PARAMETERIZED,
                versionedProcessorA,
                instantiatedProcessorB,
                "Access Key ID",
                null,
                null,
                "Property parameterized for Access Key ID");
        final List<FlowDifference> differences = List.of(parameterizationRemoved, parameterized);
        final FlowDifferenceFilters.EnvironmentalChangeContext context = FlowDifferenceFilters.buildEnvironmentalChangeContext(differences, flowManager);
        assertFalse(FlowDifferenceFilters.isEnvironmentalChange(parameterizationRemoved, null, flowManager));
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(parameterizationRemoved, null, flowManager, context));
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(parameterized, null, flowManager, context));
    }
    @Test
    public void testSelectedRelationshipChangeForNewRelationshipObservedAsEnvironmentalChange() {
        // A connection gains a relationship that the source processor newly exposes.
        // When that relationship is not auto-terminated, the processor is not dynamic,
        // and the only connection carrying it is this one, the change is environmental.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final ProcessGroup processGroup = Mockito.mock(ProcessGroup.class);
        final Connection connection = Mockito.mock(Connection.class);
        final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
        final String connectionInstanceId = "connection-instance";
        final String connectionGroupId = "group-id";
        final String processorInstanceId = "processor-instance";
        final String existingRelationshipName = "retry";
        final String newRelationshipName = "restrictions changed";
        final VersionedConnection connectionA = new VersionedConnection();
        connectionA.setSelectedRelationships(Set.of(existingRelationshipName));
        final InstantiatedVersionedConnection connectionB = new InstantiatedVersionedConnection(connectionInstanceId, connectionGroupId);
        connectionB.setSelectedRelationships(Set.of(existingRelationshipName, newRelationshipName));
        final FlowDifference difference = new StandardFlowDifference(
                DifferenceType.SELECTED_RELATIONSHIPS_CHANGED,
                connectionA,
                connectionB,
                null,
                Set.of(existingRelationshipName),
                Set.of(existingRelationshipName, newRelationshipName),
                "Selected relationships updated");
        final Relationship newRelationship = new Relationship.Builder().name(newRelationshipName).build();
        // Wire the mocks so the filter can resolve connection -> source processor -> relationship.
        Mockito.when(flowManager.getGroup(connectionGroupId)).thenReturn(processGroup);
        Mockito.when(processGroup.getConnection(connectionInstanceId)).thenReturn(connection);
        Mockito.when(connection.getIdentifier()).thenReturn(connectionInstanceId);
        Mockito.when(connection.getSource()).thenReturn(processorNode);
        Mockito.when(processorNode.getIdentifier()).thenReturn(processorInstanceId);
        Mockito.when(processorNode.getConnectableType()).thenReturn(ConnectableType.PROCESSOR);
        Mockito.when(flowManager.getProcessorNode(processorInstanceId)).thenReturn(processorNode);
        Mockito.when(processorNode.getRelationship(newRelationshipName)).thenReturn(newRelationship);
        Mockito.when(processorNode.isAutoTerminated(newRelationship)).thenReturn(false);
        // NonDynamicProcessor is a test fixture defined elsewhere in this file.
        Mockito.when(processorNode.getProcessor()).thenReturn(new NonDynamicProcessor());
        Mockito.when(processorNode.getConnections(newRelationship)).thenReturn(new HashSet<>(Set.of(connection)));
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(difference, null, flowManager));
    }
    @Test
    public void testPropertyRenameWithMatchingValueObservedAsEnvironmentalChange() {
        // A property rename that preserves the value (here a controller-service id)
        // shows up as a PROPERTY_REMOVED + PROPERTY_ADDED pair. With a context built
        // over both differences, each half is recognized as environmental.
        final FlowManager flowManager = Mockito.mock(FlowManager.class);
        final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
        final String processorInstanceId = "processor-instance";
        Mockito.when(flowManager.getProcessorNode(processorInstanceId)).thenReturn(processorNode);
        final String groupId = "group-id";
        final String versionedId = "versioned-id";
        final String controllerServiceId = "service-id";
        final String legacyPropertyName = "box-client-service";
        final String renamedPropertyName = "Box Client Service";
        // Side A (versioned flow) still uses the legacy property name.
        final VersionedProcessor versionedProcessor = new VersionedProcessor();
        versionedProcessor.setComponentType(ComponentType.PROCESSOR);
        versionedProcessor.setIdentifier(versionedId);
        versionedProcessor.setProperties(Map.of(legacyPropertyName, controllerServiceId));
        // Side B (local instance) carries the renamed property with the same value.
        final InstantiatedVersionedProcessor instantiatedProcessor = new InstantiatedVersionedProcessor(processorInstanceId, groupId);
        instantiatedProcessor.setComponentType(ComponentType.PROCESSOR);
        instantiatedProcessor.setIdentifier(versionedId);
        instantiatedProcessor.setProperties(Map.of(renamedPropertyName, controllerServiceId));
        final FlowDifference propertyRemoved = new StandardFlowDifference(
                DifferenceType.PROPERTY_REMOVED,
                versionedProcessor,
                instantiatedProcessor,
                legacyPropertyName,
                controllerServiceId,
                null,
                "Legacy property removed");
        final FlowDifference propertyAdded = new StandardFlowDifference(
                DifferenceType.PROPERTY_ADDED,
                versionedProcessor,
                instantiatedProcessor,
                renamedPropertyName,
                null,
                controllerServiceId,
                "Renamed property added");
        final List<FlowDifference> differences = List.of(propertyRemoved, propertyAdded);
        final FlowDifferenceFilters.EnvironmentalChangeContext context = FlowDifferenceFilters.buildEnvironmentalChangeContext(differences, flowManager);
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(propertyRemoved, null, flowManager, context));
        assertTrue(FlowDifferenceFilters.isEnvironmentalChange(propertyAdded, null, flowManager, context));
    }
@Test
public void testSelectedRelationshipChangeNotEnvironmentalWhenRelationshipUsedElsewhere() {
// The connection under test gains the relationship "restrictions changed", but that relationship
// is also routed through a second connection. The filter must therefore NOT classify the
// difference as a purely environmental change.
final FlowManager flowManager = Mockito.mock(FlowManager.class);
final ProcessGroup processGroup = Mockito.mock(ProcessGroup.class);
final Connection connection = Mockito.mock(Connection.class);
final Connection otherConnection = Mockito.mock(Connection.class);
final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
final String connectionInstanceId = "connection-instance";
final String otherConnectionId = "connection-other";
final String connectionGroupId = "group-id";
final String processorInstanceId = "processor-instance";
final String existingRelationshipName = "retry";
final String newRelationshipName = "restrictions changed";
// Registry copy selects only "retry"; the live copy additionally selects the new relationship.
final VersionedConnection connectionA = new VersionedConnection();
connectionA.setSelectedRelationships(Set.of(existingRelationshipName));
final InstantiatedVersionedConnection connectionB = new InstantiatedVersionedConnection(connectionInstanceId, connectionGroupId);
connectionB.setSelectedRelationships(Set.of(existingRelationshipName, newRelationshipName));
final FlowDifference difference = new StandardFlowDifference(
DifferenceType.SELECTED_RELATIONSHIPS_CHANGED,
connectionA,
connectionB,
null,
Set.of(existingRelationshipName),
Set.of(existingRelationshipName, newRelationshipName),
"Selected relationships updated");
final Relationship newRelationship = new Relationship.Builder().name(newRelationshipName).build();
// Wire the mock component graph: group -> connection -> source processor.
Mockito.when(flowManager.getGroup(connectionGroupId)).thenReturn(processGroup);
Mockito.when(processGroup.getConnection(connectionInstanceId)).thenReturn(connection);
Mockito.when(connection.getIdentifier()).thenReturn(connectionInstanceId);
Mockito.when(connection.getSource()).thenReturn(processorNode);
Mockito.when(otherConnection.getIdentifier()).thenReturn(otherConnectionId);
Mockito.when(processorNode.getIdentifier()).thenReturn(processorInstanceId);
Mockito.when(processorNode.getConnectableType()).thenReturn(ConnectableType.PROCESSOR);
Mockito.when(flowManager.getProcessorNode(processorInstanceId)).thenReturn(processorNode);
Mockito.when(processorNode.getRelationship(newRelationshipName)).thenReturn(newRelationship);
Mockito.when(processorNode.isAutoTerminated(newRelationship)).thenReturn(false);
Mockito.when(processorNode.getProcessor()).thenReturn(new NonDynamicProcessor());
// Key stub: the new relationship is routed through TWO connections, so the selected-relationship
// change is not exclusive to the connection under test.
Mockito.when(processorNode.getConnections(newRelationship)).thenReturn(new HashSet<>(Set.of(connection, otherConnection)));
assertFalse(FlowDifferenceFilters.isEnvironmentalChange(difference, null, flowManager));
}
@Test
public void testSelectedRelationshipChangeNotEnvironmentalWhenProcessorHasDynamicRelationships() {
// Same scenario as the "used elsewhere" test, except the source processor is annotated with
// @DynamicRelationship (see DynamicRelationshipProcessor below). For such processors the new
// relationship cannot be assumed to be environmental, so the filter must reject it.
final FlowManager flowManager = Mockito.mock(FlowManager.class);
final ProcessGroup processGroup = Mockito.mock(ProcessGroup.class);
final Connection connection = Mockito.mock(Connection.class);
final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
final String connectionInstanceId = "connection-instance";
final String connectionGroupId = "group-id";
final String processorInstanceId = "processor-instance";
final String existingRelationshipName = "retry";
final String newRelationshipName = "dynamic";
// Registry copy selects only "retry"; the live copy additionally selects "dynamic".
final VersionedConnection connectionA = new VersionedConnection();
connectionA.setSelectedRelationships(Set.of(existingRelationshipName));
final InstantiatedVersionedConnection connectionB = new InstantiatedVersionedConnection(connectionInstanceId, connectionGroupId);
connectionB.setSelectedRelationships(Set.of(existingRelationshipName, newRelationshipName));
final FlowDifference difference = new StandardFlowDifference(
DifferenceType.SELECTED_RELATIONSHIPS_CHANGED,
connectionA,
connectionB,
null,
Set.of(existingRelationshipName),
Set.of(existingRelationshipName, newRelationshipName),
"Selected relationships updated");
final Relationship newRelationship = new Relationship.Builder().name(newRelationshipName).build();
// Wire the mock component graph: group -> connection -> source processor.
Mockito.when(flowManager.getGroup(connectionGroupId)).thenReturn(processGroup);
Mockito.when(processGroup.getConnection(connectionInstanceId)).thenReturn(connection);
Mockito.when(connection.getIdentifier()).thenReturn(connectionInstanceId);
Mockito.when(connection.getSource()).thenReturn(processorNode);
Mockito.when(processorNode.getIdentifier()).thenReturn(processorInstanceId);
Mockito.when(processorNode.getConnectableType()).thenReturn(ConnectableType.PROCESSOR);
Mockito.when(flowManager.getProcessorNode(processorInstanceId)).thenReturn(processorNode);
Mockito.when(processorNode.getRelationship(newRelationshipName)).thenReturn(newRelationship);
Mockito.when(processorNode.isAutoTerminated(newRelationship)).thenReturn(false);
Mockito.when(processorNode.getConnections(newRelationship)).thenReturn(new HashSet<>(Set.of(connection)));
// Key stub: the processor declares dynamic relationships.
Mockito.when(processorNode.getProcessor()).thenReturn(new DynamicRelationshipProcessor());
assertFalse(FlowDifferenceFilters.isEnvironmentalChange(difference, null, flowManager));
}
@Test
public void testSelectedRelationshipChangeWithRemovedRelationshipObservedAsEnvironmentalChange() {
// The old selected relationship no longer exists on the processor (getRelationship returns
// null below) and both replacement relationships are routed only through this connection, so
// the selected-relationship change IS expected to be classified as environmental.
final FlowManager flowManager = Mockito.mock(FlowManager.class);
final ProcessGroup processGroup = Mockito.mock(ProcessGroup.class);
final Connection connection = Mockito.mock(Connection.class);
final ProcessorNode processorNode = Mockito.mock(ProcessorNode.class);
final String connectionInstanceId = "connection-instance";
final String connectionGroupId = "group-id";
final String processorInstanceId = "processor-instance";
final String oldRelationshipName = "old";
final String firstNewRelationshipName = "newA";
final String secondNewRelationshipName = "newB";
// Registry copy selects only the now-removed relationship; the live copy selects the two new ones.
final VersionedConnection connectionA = new VersionedConnection();
connectionA.setSelectedRelationships(Set.of(oldRelationshipName));
final InstantiatedVersionedConnection connectionB = new InstantiatedVersionedConnection(connectionInstanceId, connectionGroupId);
connectionB.setSelectedRelationships(Set.of(firstNewRelationshipName, secondNewRelationshipName));
final FlowDifference difference = new StandardFlowDifference(
DifferenceType.SELECTED_RELATIONSHIPS_CHANGED,
connectionA,
connectionB,
null,
Set.of(oldRelationshipName),
Set.of(firstNewRelationshipName, secondNewRelationshipName),
"Selected relationships updated");
final Relationship firstNewRelationship = new Relationship.Builder().name(firstNewRelationshipName).build();
final Relationship secondNewRelationship = new Relationship.Builder().name(secondNewRelationshipName).build();
// Wire the mock component graph: group -> connection -> source processor.
Mockito.when(flowManager.getGroup(connectionGroupId)).thenReturn(processGroup);
Mockito.when(processGroup.getConnection(connectionInstanceId)).thenReturn(connection);
Mockito.when(connection.getIdentifier()).thenReturn(connectionInstanceId);
Mockito.when(connection.getSource()).thenReturn(processorNode);
Mockito.when(processorNode.getIdentifier()).thenReturn(processorInstanceId);
Mockito.when(processorNode.getConnectableType()).thenReturn(ConnectableType.PROCESSOR);
Mockito.when(flowManager.getProcessorNode(processorInstanceId)).thenReturn(processorNode);
Mockito.when(processorNode.getProcessor()).thenReturn(new NonDynamicProcessor());
Mockito.when(processorNode.getRelationship(firstNewRelationshipName)).thenReturn(firstNewRelationship);
Mockito.when(processorNode.getRelationship(secondNewRelationshipName)).thenReturn(secondNewRelationship);
// Key stub: the previously selected relationship has been removed from the component.
Mockito.when(processorNode.getRelationship(oldRelationshipName)).thenReturn(null);
Mockito.when(processorNode.isAutoTerminated(firstNewRelationship)).thenReturn(false);
Mockito.when(processorNode.isAutoTerminated(secondNewRelationship)).thenReturn(false);
// Each new relationship is routed exclusively through the connection under test.
Mockito.when(processorNode.getConnections(firstNewRelationship)).thenReturn(new HashSet<>(Set.of(connection)));
Mockito.when(processorNode.getConnections(secondNewRelationship)).thenReturn(new HashSet<>(Set.of(connection)));
assertTrue(FlowDifferenceFilters.isEnvironmentalChange(difference, null, flowManager));
}
@DynamicProperty(name = "Dynamic Property", value = "Value", description = "Allows dynamic properties")
private static class DynamicAnnotationProcessor extends AbstractProcessor {
    // Test stub whose sole purpose is to carry the @DynamicProperty annotation.
    @Override
    public void onTrigger(final ProcessContext ctx, final ProcessSession procSession) {
        // Intentionally empty: the tests inspect the class, they never trigger it.
    }
}
private static class NonDynamicProcessor extends AbstractProcessor {
    // Test stub with no dynamic-property/relationship annotations at all.
    @Override
    public void onTrigger(final ProcessContext ctx, final ProcessSession procSession) {
        // Intentionally empty: the tests inspect the class, they never trigger it.
    }
}
@DynamicRelationship(name = "dynamic", description = "dynamic")
private static class DynamicRelationshipProcessor extends AbstractProcessor {
    // Test stub whose sole purpose is to carry the @DynamicRelationship annotation.
    @Override
    public void onTrigger(final ProcessContext ctx, final ProcessSession procSession) {
        // Intentionally empty: the tests inspect the class, they never trigger it.
    }
}
}
|
apache/oozie | 36,381 | core/src/test/java/org/apache/oozie/event/TestEventGeneration.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.event;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.AppType;
import org.apache.oozie.CoordinatorActionBean;
import org.apache.oozie.CoordinatorJobBean;
import org.apache.oozie.DagEngine;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.control.ControlNodeActionExecutor;
import org.apache.oozie.client.CoordinatorAction;
import org.apache.oozie.client.CoordinatorJob;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.event.Event;
import org.apache.oozie.client.event.JobEvent;
import org.apache.oozie.client.event.JobEvent.EventStatus;
import org.apache.oozie.client.rest.RestConstants;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.command.coord.CoordActionCheckXCommand;
import org.apache.oozie.command.coord.CoordActionInputCheckXCommand;
import org.apache.oozie.command.coord.CoordMaterializeTransitionXCommand;
import org.apache.oozie.command.coord.CoordRerunXCommand;
import org.apache.oozie.command.coord.CoordResumeXCommand;
import org.apache.oozie.command.coord.CoordinatorXCommand;
import org.apache.oozie.command.wf.ActionCheckXCommand;
import org.apache.oozie.command.wf.ActionKillXCommand;
import org.apache.oozie.command.wf.ActionStartXCommand;
import org.apache.oozie.command.wf.ActionXCommand;
import org.apache.oozie.command.wf.KillXCommand;
import org.apache.oozie.command.wf.ResumeXCommand;
import org.apache.oozie.command.wf.SignalXCommand;
import org.apache.oozie.command.wf.StartXCommand;
import org.apache.oozie.command.wf.SuspendXCommand;
import org.apache.oozie.command.wf.WorkflowXCommand;
import org.apache.oozie.coord.CoordELFunctions;
import org.apache.oozie.executor.jpa.CoordActionGetJPAExecutor;
import org.apache.oozie.executor.jpa.CoordActionQueryExecutor;
import org.apache.oozie.executor.jpa.CoordActionQueryExecutor.CoordActionQuery;
import org.apache.oozie.executor.jpa.CoordJobQueryExecutor.CoordJobQuery;
import org.apache.oozie.executor.jpa.CoordJobGetJPAExecutor;
import org.apache.oozie.executor.jpa.CoordJobQueryExecutor;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowActionGetJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowActionInsertJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobGetJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobInsertJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;
import org.apache.oozie.service.ActionService;
import org.apache.oozie.service.EventHandlerService;
import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.LiteWorkflowStoreService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.UUIDService;
import org.apache.oozie.test.XDataTestCase;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.workflow.WorkflowApp;
import org.apache.oozie.workflow.WorkflowInstance;
import org.apache.oozie.workflow.lite.ActionNodeDef;
import org.apache.oozie.workflow.lite.EndNodeDef;
import org.apache.oozie.workflow.lite.LiteWorkflowApp;
import org.apache.oozie.workflow.lite.LiteWorkflowInstance;
import org.apache.oozie.workflow.lite.StartNodeDef;
import org.apache.oozie.workflow.lite.TestLiteWorkflowLib;
import org.apache.oozie.workflow.lite.TestLiteWorkflowLib.TestActionNodeHandler;
import org.apache.oozie.workflow.lite.TestLiteWorkflowLib.TestControlNodeHandler;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Testcase to test that events are correctly generated from corresponding
* Commands and inserted into events queue
*/
public class TestEventGeneration extends XDataTestCase {
// Event queue backing the EventHandlerService; every test polls generated events from here.
EventQueue queue;
// Oozie services container, created fresh in setUp() and destroyed in tearDown().
Services services;
// Service under test: generates job/action events and feeds them into 'queue'.
EventHandlerService ehs;
// JPA access used to (re-)load job and action beans after each command runs.
JPAService jpaService;
@Override
@Before
protected void setUp() throws Exception {
// Boot a minimal Services stack with only the EventHandlerService extension enabled so that
// events land in the queue and stay there for deterministic polling by the tests.
super.setUp();
services = new Services();
Configuration conf = services.getConf();
// The EventHandlerService manipulates the queues in the background, so the actual test results depend on the
// circumstances (like the speed of the machine, debugging etc).
conf.setInt("oozie.service.EventHandlerService.worker.threads", 0);
conf.set(Services.CONF_SERVICE_EXT_CLASSES, "org.apache.oozie.service.EventHandlerService");
services.init();
ehs = services.get(EventHandlerService.class);
queue = ehs.getEventQueue();
jpaService = services.get(JPAService.class);
}
@Override
@After
protected void tearDown() throws Exception {
// Destroy the Services singleton before the base class cleans up the test environment.
Services.get().destroy();
super.tearDown();
}
// Verifies that each workflow-job lifecycle transition (start, suspend, resume, kill, success)
// emits exactly one JobEvent carrying the job's id, user, app name and timestamps.
@Test
public void testWorkflowJobEvent() throws Exception {
assertEquals(0, queue.size());
// Minimal start -> action -> end workflow definition.
WorkflowApp app = new LiteWorkflowApp("testApp", "<workflow-app/>", new StartNodeDef(
LiteWorkflowStoreService.LiteControlNodeHandler.class, "fs-node")).addNode(
new ActionNodeDef("fs-node", "", TestLiteWorkflowLib.TestActionNodeHandler.class, "end", "end"))
.addNode(new EndNodeDef("end", LiteWorkflowStoreService.LiteControlNodeHandler.class));
WorkflowJobBean job = addRecordToWfJobTable(app, WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
// Starting job
new StartXCommand(job.getId()).call();
WorkflowJobGetJPAExecutor wfJobGetCmd = new WorkflowJobGetJPAExecutor(job.getId());
job = jpaService.execute(wfJobGetCmd);
assertEquals(WorkflowJob.Status.RUNNING, job.getStatus());
assertEquals(1, queue.size());
JobEvent event = (JobEvent) queue.poll();
assertNotNull(event);
assertEquals(EventStatus.STARTED, event.getEventStatus());
assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
assertEquals(job.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(job.getAppName(), event.getAppName());
assertEquals(job.getStartTime(), event.getStartTime());
assertEquals(0, queue.size());
// Suspending job
new SuspendXCommand(job.getId()).call();
job = jpaService.execute(wfJobGetCmd);
assertEquals(WorkflowJob.Status.SUSPENDED, job.getStatus());
assertEquals(1, queue.size());
event = (JobEvent) queue.poll();
assertNotNull(event);
assertEquals(EventStatus.SUSPEND, event.getEventStatus());
assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
assertEquals(job.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(job.getAppName(), event.getAppName());
assertEquals(0, queue.size());
// Resuming job
// NOTE(review): the event status after resume is not asserted here — only the payload fields.
new ResumeXCommand(job.getId()).call();
job = jpaService.execute(wfJobGetCmd);
assertEquals(WorkflowJob.Status.RUNNING, job.getStatus());
assertEquals(1, queue.size());
event = (JobEvent) queue.poll();
assertNotNull(event);
assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
assertEquals(job.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(job.getAppName(), event.getAppName());
assertEquals(job.getStartTime(), event.getStartTime());
assertEquals(0, queue.size());
// Killing job
// A killed workflow job is reported with FAILURE event status (asserted below).
new KillXCommand(job.getId()).call();
job = jpaService.execute(wfJobGetCmd);
assertEquals(WorkflowJob.Status.KILLED, job.getStatus());
assertEquals(1, queue.size());
event = (JobEvent) queue.poll();
assertNotNull(event);
assertEquals(EventStatus.FAILURE, event.getEventStatus());
assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
assertEquals(job.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(job.getAppName(), event.getAppName());
assertEquals(job.getStartTime(), event.getStartTime());
assertEquals(job.getEndTime(), event.getEndTime());
assertEquals(0, queue.size());
// Successful job (testing SignalX)
job = _createWorkflowJob();
LiteWorkflowInstance wfInstance = (LiteWorkflowInstance) job.getWorkflowInstance();
wfInstance.start();
job.setWorkflowInstance(wfInstance);
WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, job);
WorkflowActionBean wfAction = jpaService.execute(new WorkflowActionGetJPAExecutor(job.getId() + "@one"));
new SignalXCommand(job.getId(), wfAction.getId()).call();
job = jpaService.execute(new WorkflowJobGetJPAExecutor(job.getId()));
assertEquals(WorkflowJob.Status.SUCCEEDED, job.getStatus());
assertEquals(1, queue.size());
event = (JobEvent) queue.poll();
assertNotNull(event);
assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
assertEquals(job.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(job.getAppName(), event.getAppName());
assertEquals(job.getStartTime(), event.getStartTime());
assertEquals(job.getEndTime(), event.getEndTime());
}
// Verifies the events emitted across a coordinator action's lifecycle: WAITING on
// materialization, STARTED on input-check, then SUCCESS / FAILURE / SUSPEND driven by the
// underlying workflow status, plus STARTED on coord resume and WAITING again on coord rerun.
@Test
public void testCoordinatorActionEvent() throws Exception {
// avoid noise from other apptype events by setting it to only
// coord action
ehs.setAppTypes(new HashSet<String>(Arrays.asList("coordinator_action")));
assertEquals(queue.size(), 0);
Date startTime = DateUtils.parseDateOozieTZ("2013-01-01T10:00Z");
Date endTime = DateUtils.parseDateOozieTZ("2013-01-01T10:01Z");
CoordinatorJobBean coord = addRecordToCoordJobTable(CoordinatorJob.Status.RUNNING, startTime, endTime, false,
false, 0);
modifyCoordForRunning(coord);
// Action WAITING on materialization
new CoordMaterializeTransitionXCommand(coord.getId(), 3600).call();
final CoordActionGetJPAExecutor coordGetCmd = new CoordActionGetJPAExecutor(coord.getId() + "@1");
CoordinatorActionBean action = jpaService.execute(coordGetCmd);
assertEquals(CoordinatorAction.Status.WAITING, action.getStatus());
assertEquals(1, queue.size());
JobEvent event = (JobEvent) queue.poll();
assertNotNull(event);
assertEquals(EventStatus.WAITING, event.getEventStatus());
assertEquals(AppType.COORDINATOR_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(action.getJobId(), event.getParentId());
assertEquals(action.getNominalTime(), ((CoordinatorActionEvent) event).getNominalTime());
// A WAITING action has not started yet, so the event carries no start time.
assertNull(event.getStartTime());
assertEquals(coord.getUser(), event.getUser());
assertEquals(coord.getAppName(), event.getAppName());
assertEquals(0, queue.size());
// Make Action ready
// In this case it will proceed to Running since n(ready_actions) < concurrency
new CoordActionInputCheckXCommand(action.getId(), coord.getId()).call();
action = jpaService.execute(coordGetCmd);
assertEquals(CoordinatorAction.Status.RUNNING, action.getStatus());
event = (JobEvent) queue.poll();
assertEquals(EventStatus.STARTED, event.getEventStatus());
assertEquals(AppType.COORDINATOR_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(action.getJobId(), event.getParentId());
assertEquals(action.getNominalTime(), ((CoordinatorActionEvent) event).getNominalTime());
// The STARTED event's start time matches the launched workflow job's start time.
WorkflowJobBean wfJob = jpaService.execute(new WorkflowJobGetJPAExecutor(action.getExternalId()));
assertEquals(wfJob.getStartTime(), event.getStartTime());
assertEquals(coord.getUser(), event.getUser());
assertEquals(coord.getAppName(), event.getAppName());
sleep(2000);
// Action Success
// Mark the backing workflow SUCCEEDED and force the action back to RUNNING so the check
// command observes the workflow's terminal state and completes the action.
wfJob.setStatus(WorkflowJob.Status.SUCCEEDED);
WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_MODTIME, wfJob);
action.setStatus(CoordinatorAction.Status.RUNNING);
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_STATUS_PENDING_TIME, action);
new CoordActionCheckXCommand(action.getId(), 0).call();
action = jpaService.execute(coordGetCmd);
assertEquals(CoordinatorAction.Status.SUCCEEDED, action.getStatus());
// Drain the whole batch and assert on the last event, which is the SUCCESS event.
List<Event> list = queue.pollBatch();
event = (JobEvent)list.get(list.size()-1);
assertEquals(EventStatus.SUCCESS, event.getEventStatus());
assertEquals(AppType.COORDINATOR_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(action.getJobId(), event.getParentId());
assertEquals(action.getNominalTime(), ((CoordinatorActionEvent) event).getNominalTime());
assertEquals(wfJob.getStartTime(), event.getStartTime());
assertEquals(coord.getUser(), event.getUser());
assertEquals(coord.getAppName(), event.getAppName());
// Action Failure
wfJob.setStatus(WorkflowJob.Status.FAILED);
action.setStatus(CoordinatorAction.Status.RUNNING);
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_STATUS_PENDING_TIME, action);
WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_MODTIME, wfJob);
new CoordActionCheckXCommand(action.getId(), 0).call();
action = jpaService.execute(coordGetCmd);
assertEquals(CoordinatorAction.Status.FAILED, action.getStatus());
event = (JobEvent) queue.poll();
assertEquals(EventStatus.FAILURE, event.getEventStatus());
assertEquals(AppType.COORDINATOR_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(action.getJobId(), event.getParentId());
assertEquals(action.getNominalTime(), ((CoordinatorActionEvent) event).getNominalTime());
assertEquals(wfJob.getStartTime(), event.getStartTime());
assertEquals(coord.getUser(), event.getUser());
assertEquals(coord.getAppName(), event.getAppName());
// Action Suspended
wfJob.setStatus(WorkflowJob.Status.SUSPENDED);
action.setStatus(CoordinatorAction.Status.RUNNING);
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_STATUS_PENDING_TIME,
action);
WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_MODTIME, wfJob);
new CoordActionCheckXCommand(action.getId(), 0).call();
action = jpaService.execute(coordGetCmd);
assertEquals(CoordinatorAction.Status.SUSPENDED, action.getStatus());
event = (JobEvent) queue.poll();
assertEquals(EventStatus.SUSPEND, event.getEventStatus());
assertEquals(AppType.COORDINATOR_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(action.getJobId(), event.getParentId());
assertEquals(action.getNominalTime(), ((CoordinatorActionEvent) event).getNominalTime());
assertEquals(wfJob.getStartTime(), event.getStartTime());
assertEquals(coord.getUser(), event.getUser());
assertEquals(coord.getAppName(), event.getAppName());
// Action start on Coord Resume
// Suspend the coord job, action and workflow (both bean and instance) so that resuming
// the coordinator emits a fresh STARTED event for the action.
coord.setStatus(CoordinatorJobBean.Status.SUSPENDED);
CoordJobQueryExecutor.getInstance().executeUpdate(CoordJobQuery.UPDATE_COORD_JOB_STATUS, coord);
action.setStatus(CoordinatorAction.Status.SUSPENDED);
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_STATUS_PENDING_TIME, action);
wfJob.setStatus(WorkflowJob.Status.SUSPENDED);
WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
((LiteWorkflowInstance) wfInstance).setStatus(WorkflowInstance.Status.SUSPENDED);
wfJob.setWorkflowInstance(wfInstance);
WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob);
queue.clear();
new CoordResumeXCommand(coord.getId()).call();
waitForEventGeneration(1000);
CoordinatorActionEvent cevent = (CoordinatorActionEvent) queue.poll();
assertEquals(EventStatus.STARTED, cevent.getEventStatus());
assertEquals(AppType.COORDINATOR_ACTION, cevent.getAppType());
assertEquals(action.getId(), cevent.getId());
assertEquals(action.getJobId(), cevent.getParentId());
assertEquals(action.getNominalTime(), cevent.getNominalTime());
coord = CoordJobQueryExecutor.getInstance().get(CoordJobQuery.GET_COORD_JOB, coord.getId());
assertEquals(coord.getLastModifiedTime(), cevent.getStartTime());
// Action going to WAITING on Coord Rerun
action.setStatus(CoordinatorAction.Status.KILLED);
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_STATUS_PENDING_TIME, action);
queue.clear();
new CoordRerunXCommand(coord.getId(), RestConstants.JOB_COORD_SCOPE_ACTION, "1", false, true, false,
null).call();
waitFor(3 * 100, new Predicate() {
@Override
public boolean evaluate() throws Exception {
return jpaService.execute(coordGetCmd).getStatus() == CoordinatorAction.Status.WAITING;
}
});
cevent = (CoordinatorActionEvent) queue.poll();
assertEquals(EventStatus.WAITING, cevent.getEventStatus());
assertEquals(AppType.COORDINATOR_ACTION, cevent.getAppType());
assertEquals(action.getId(), cevent.getId());
assertEquals(action.getJobId(), cevent.getParentId());
assertEquals(action.getNominalTime(), cevent.getNominalTime());
// NOTE(review): this asserts on the stale 'event' variable (last polled in the SUSPEND case
// above), not on 'cevent' like the surrounding assertions — verify the intended target.
assertEquals(wfJob.getStartTime(), event.getStartTime());
assertNotNull(cevent.getMissingDeps());
}
// Verifies the events emitted for a workflow action's transitions: STARTED on action start,
// SUSPEND when the action ends up in START_MANUAL, and FAILURE when the action is killed.
@Test
public void testWorkflowActionEvent() throws Exception {
assertEquals(queue.size(), 0);
// avoid noise from other apptype events by setting it to only
// workflow action
ehs.setAppTypes(new HashSet<String>(Arrays.asList("workflow_action")));
WorkflowJobBean job = this.addRecordToWfJobTable(WorkflowJob.Status.RUNNING, WorkflowInstance.Status.RUNNING);
WorkflowActionBean action = this.addRecordToWfActionTable(job.getId(), "1", WorkflowAction.Status.PREP, true);
// adding record sets externalChildID to dummy workflow-id so resetting it
action.setExternalChildIDs(null);
WorkflowActionQueryExecutor.getInstance().executeUpdate(WorkflowActionQuery.UPDATE_ACTION_START, action);
// Starting job
new ActionStartXCommand(action.getId(), "map-reduce").call();
WorkflowActionGetJPAExecutor wfActionGetCmd = new WorkflowActionGetJPAExecutor(action.getId());
action = jpaService.execute(wfActionGetCmd);
assertEquals(WorkflowAction.Status.RUNNING, action.getStatus());
assertEquals(1, queue.size());
WorkflowActionEvent event = (WorkflowActionEvent) queue.poll();
assertNotNull(event);
assertEquals(EventStatus.STARTED, event.getEventStatus());
assertEquals(AppType.WORKFLOW_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(action.getName(), event.getAppName());
assertEquals(action.getStartTime(), event.getStartTime());
assertEquals(0, queue.size());
// Suspending job
// The forced check command drives the action to START_MANUAL, which is surfaced to the
// event queue as a SUSPEND event (asserted below).
ActionExecutor.Context context = new ActionXCommand.ActionExecutorContext(job, action, false, false);
ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType());
ActionCheckXCommandForTest dac = new ActionCheckXCommandForTest(context, executor, action.getId());
dac.execute();
action = dac.getAction();
assertEquals(WorkflowAction.Status.START_MANUAL, action.getStatus());
assertEquals(1, queue.size());
event = (WorkflowActionEvent) queue.poll();
assertNotNull(event);
assertEquals(EventStatus.SUSPEND, event.getEventStatus());
assertEquals(AppType.WORKFLOW_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(action.getName(), event.getAppName());
assertEquals(0, queue.size());
// Killing job
action.setStatus(WorkflowAction.Status.KILLED);
action.setPendingOnly();
action.setEndTime(null); //its already set by XTestCase add action record method above
WorkflowActionQueryExecutor.getInstance().executeUpdate(WorkflowActionQuery.UPDATE_ACTION_END, action);
new ActionKillXCommand(action.getId()).call();
action = jpaService.execute(wfActionGetCmd);
assertEquals(WorkflowAction.Status.KILLED, action.getStatus());
assertEquals(1, queue.size());
event = (WorkflowActionEvent) queue.poll();
assertNotNull(event);
// A killed workflow action is reported with FAILURE event status.
assertEquals(EventStatus.FAILURE, event.getEventStatus());
assertEquals(AppType.WORKFLOW_ACTION, event.getAppType());
assertEquals(action.getId(), event.getId());
assertEquals(job.getUser(), event.getUser());
assertEquals(action.getName(), event.getAppName());
assertEquals(action.getStartTime(), event.getStartTime());
assertNotNull(action.getEndTime());
assertNotNull(event.getEndTime());
assertEquals(action.getEndTime(), event.getEndTime());
assertEquals(0, queue.size());
}
@Test
public void testWorkflowJobEventError() throws Exception {
    // Verify that the error code and message passed to generateEvent() are propagated onto
    // the resulting WorkflowJobEvent together with a FAILURE event status.
    final WorkflowJobBean failedJob = addRecordToWfJobTable(WorkflowJob.Status.FAILED, WorkflowInstance.Status.FAILED);

    // Override execute() so the command does nothing except generate the error event.
    final WorkflowXCommand<Void> eventGeneratingCommand = new KillXCommand(failedJob.getId()) {
        @Override
        protected Void execute() {
            WorkflowXCommand.generateEvent(failedJob, "errorCode", "errorMsg");
            return null;
        }
    };
    eventGeneratingCommand.call();

    final WorkflowJobEvent generated = (WorkflowJobEvent) queue.poll();
    assertNotNull(generated);
    assertEquals("errorCode", generated.getErrorCode());
    assertEquals("errorMsg", generated.getErrorMessage());
    assertEquals(EventStatus.FAILURE, generated.getEventStatus());
}
// Verifies that pull and push (HCat) missing dependencies — alone and combined — are rendered
// into the CoordinatorActionEvent's missingDeps field.
@Test
public void testCoordinatorActionEventDependencies() throws Exception {
final CoordinatorJobBean coord = addRecordToCoordJobTable(CoordinatorJob.Status.RUNNING, false, false);
final CoordinatorActionBean action = addRecordToCoordActionTable(coord.getId(), 1,
CoordinatorAction.Status.RUNNING, "coord-action-get.xml", 0);
// Insert a workflow-job record matching the action's external id so the check command can load it.
WorkflowJobBean wjb = new WorkflowJobBean();
wjb.setId(action.getExternalId());
wjb.setLastModifiedTime(new Date());
WorkflowJobQueryExecutor.getInstance().insert(wjb);
// Command stub whose execute() only generates the event for the action under test.
CoordinatorXCommand<Void> myCmd = new CoordActionCheckXCommand(action.getId(), 0) {
@Override
protected Void execute() {
CoordinatorXCommand.generateEvent(action, coord.getUser(), coord.getAppName(), null);
return null;
}
};
// CASE 1: Only pull missing deps
action.setMissingDependencies("pull");
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_DEPENDENCIES, action);
myCmd.call();
CoordinatorActionEvent event = (CoordinatorActionEvent) queue.poll();
assertNotNull(event);
assertEquals("pull", event.getMissingDeps());
// CASE 2: Only hcat (push) missing deps
action.setMissingDependencies(null);
action.setPushMissingDependencies("push");
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_DEPENDENCIES, action);
myCmd.call();
event = (CoordinatorActionEvent) queue.poll();
assertNotNull(event);
assertEquals("push", event.getMissingDeps());
// CASE 3: Both types
// Note: relies on the push dependency still being set from CASE 2; only the pull side is re-added.
action.setMissingDependencies("pull");
CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION_DEPENDENCIES, action);
myCmd.call();
event = (CoordinatorActionEvent) queue.poll();
assertNotNull(event);
assertEquals("pull" + CoordELFunctions.INSTANCE_SEPARATOR + "push", event.getMissingDeps());
}
@Test
public void testForNoDuplicatesWorkflowEvents() throws Exception {
    // test workflow job events
    // Submits a trivial no-op workflow and verifies exactly one STARTED and one
    // SUCCESS event are generated, i.e. no duplicate events for the same transition.
    Reader reader = IOUtils.getResourceAsReader("wf-no-op.xml", -1);
    Writer writer = new OutputStreamWriter(new FileOutputStream(getTestCaseDir() + "/workflow.xml"),
            StandardCharsets.UTF_8);
    IOUtils.copyCharStream(reader, writer);
    final DagEngine engine = new DagEngine(getTestUser());
    Configuration conf = new XConfiguration();
    conf.set(OozieClient.APP_PATH, getTestCaseFileUri("workflow.xml"));
    conf.set(OozieClient.USER_NAME, getTestUser());
    // second arg true => start the job immediately after submission
    final String jobId1 = engine.submitJob(conf, true);
    final WorkflowJobGetJPAExecutor readCmd = new WorkflowJobGetJPAExecutor(jobId1);
    waitFor(1 * 100, new Predicate() {
        @Override
        public boolean evaluate() throws Exception {
            return jpaService.execute(readCmd).getStatus() == WorkflowJob.Status.SUCCEEDED;
        }
    });
    // Exactly two events expected, in order: STARTED then SUCCESS.
    assertEquals(2, queue.size());
    assertEquals(EventStatus.STARTED, ((JobEvent)queue.poll()).getEventStatus());
    assertEquals(EventStatus.SUCCESS, ((JobEvent)queue.poll()).getEventStatus());
    queue.clear();
}
@Test
public void testForNoDuplicatesCoordinatorActionEvents() throws Exception {
    // test coordinator action events (failure case)
    Date startTime = DateUtils.parseDateOozieTZ("2009-02-01T23:59Z");
    Date endTime = DateUtils.parseDateOozieTZ("2009-02-02T23:59Z");
    CoordinatorJobBean coord = addRecordToCoordJobTable(CoordinatorJob.Status.RUNNING, startTime, endTime, false,
            false, 0);
    // Point the coordinator at an invalid workflow so the materialized action fails.
    _modifyCoordForFailureAction(coord, "wf-invalid-fork.xml");
    new CoordMaterializeTransitionXCommand(coord.getId(), 3600).call();
    final CoordJobGetJPAExecutor readCmd1 = new CoordJobGetJPAExecutor(coord.getId());
    waitFor(1 * 100, new Predicate() {
        @Override
        public boolean evaluate() throws Exception {
            CoordinatorJobBean bean = jpaService.execute(readCmd1);
            return bean.getStatus() == CoordinatorJob.Status.SUCCEEDED
                    || bean.getStatus() == CoordinatorJob.Status.KILLED;
        }
    });
    // Exactly two events expected: WAITING (on materialization) then FAILURE.
    assertEquals(2, queue.size());
    assertEquals(EventStatus.WAITING, ((JobEvent)queue.poll()).getEventStatus());
    assertEquals(EventStatus.FAILURE, ((JobEvent)queue.poll()).getEventStatus());
    queue.clear();
}
@Test
public void testInvalidXMLCoordinatorFailsForNoDuplicates() throws Exception {
    Date startTime = DateUtils.parseDateOozieTZ("2009-02-01T23:59Z");
    Date endTime = DateUtils.parseDateOozieTZ("2009-02-02T23:59Z");
    // test coordinator action events (failure from ActionStartX)
    // Also subscribe to workflow_action events so all three failure levels are observed.
    ehs.getAppTypes().add("workflow_action");
    CoordinatorJobBean coord = addRecordToCoordJobTable(CoordinatorJob.Status.RUNNING, startTime, endTime, false, false, 0);
    CoordinatorActionBean action = addRecordToCoordActionTable(coord.getId(), 1, CoordinatorAction.Status.RUNNING,
            "coord-action-sla1.xml", 0);
    WorkflowJobBean wf = addRecordToWfJobTable(WorkflowJob.Status.RUNNING, WorkflowInstance.Status.RUNNING,
            action.getId());
    // Link the coordinator action to its child workflow job.
    action.setExternalId(wf.getId());
    CoordActionQueryExecutor.getInstance().executeUpdate(CoordActionQuery.UPDATE_COORD_ACTION, action);
    // _createWorkflowAction stores deliberately malformed action XML, so starting the
    // action fails and the failure propagates wf-action -> wf-job -> coord-action.
    String waId = _createWorkflowAction(wf.getId(), "wf-action");
    new ActionStartXCommand(waId, action.getType()).call();
    final CoordJobGetJPAExecutor readCmd2 = new CoordJobGetJPAExecutor(coord.getId());
    waitFor(1 * 100, new Predicate() {
        @Override
        public boolean evaluate() throws Exception {
            return jpaService.execute(readCmd2).getStatus() == CoordinatorJob.Status.KILLED;
        }
    });
    // Expect exactly one FAILURE event per app type -- no duplicates.
    assertEquals(3, queue.size());
    HashMap<AppType,JobEvent> eventsMap = new HashMap<AppType,JobEvent>();
    while (queue.size() > 0){
        JobEvent event = (JobEvent) queue.poll();
        eventsMap.put(event.getAppType(), event);
    }
    // One distinct app type per event confirms no duplicates were collapsed.
    assertEquals(3, eventsMap.size());
    //Check the WF action
    {
        JobEvent wfActionEvent = eventsMap.get(AppType.WORKFLOW_ACTION);
        assertNotNull("There should be a WF action", wfActionEvent);
        assertEquals(EventStatus.FAILURE, wfActionEvent.getEventStatus());
        assertEquals(waId, wfActionEvent.getId());
        assertEquals(AppType.WORKFLOW_ACTION, wfActionEvent.getAppType());
    }
    //Check the WF job
    {
        JobEvent wfJobEvent = eventsMap.get(AppType.WORKFLOW_JOB);
        assertNotNull("There should be a WF job", wfJobEvent);
        assertEquals(EventStatus.FAILURE, wfJobEvent.getEventStatus());
        assertEquals(wf.getId(), wfJobEvent.getId());
        assertEquals(AppType.WORKFLOW_JOB, wfJobEvent.getAppType());
    }
    //Check the Coordinator action
    {
        JobEvent coordActionEvent = eventsMap.get(AppType.COORDINATOR_ACTION);
        assertNotNull("There should be a Coordinator action", coordActionEvent);
        assertEquals(EventStatus.FAILURE, coordActionEvent.getEventStatus());
        assertEquals(action.getId(), coordActionEvent.getId());
        assertEquals(AppType.COORDINATOR_ACTION, coordActionEvent.getAppType());
    }
    queue.clear();
}
/**
 * Test double for {@code ActionCheckXCommand}: its {@code execute()} forces the
 * non-transient-failure handling path (START_MANUAL) and then generates the
 * corresponding event, so tests can inspect the event without running a real
 * action check.
 */
private class ActionCheckXCommandForTest extends ActionCheckXCommand {
    ActionExecutor.Context context;
    ActionExecutor executor;
    WorkflowActionBean action;
    JPAService jpa;
    public ActionCheckXCommandForTest(ActionExecutor.Context context, ActionExecutor executor, String actionId)
            throws JPAExecutorException {
        super(actionId);
        this.context = context;
        this.executor = executor;
        jpa = Services.get().get(JPAService.class);
        // Load the persisted action so handleNonTransient operates on real state.
        this.action = jpa.execute(new WorkflowActionGetJPAExecutor(actionId));
    }
    @Override
    public Void execute() throws CommandException {
        handleNonTransient(context, executor, WorkflowAction.Status.START_MANUAL);
        // Re-read the (possibly updated) action bean back from the command context.
        action = (WorkflowActionBean) ((ActionExecutorContext) context).getAction();
        // Control-node executors never emit events; only generate when eventing is on.
        if (!(executor instanceof ControlNodeActionExecutor) && EventHandlerService.isEnabled()) {
            generateEvent(action, getTestUser());
        }
        return null;
    }
    public WorkflowActionBean getAction() {
        return action;
    }
}
/**
 * Inserts a minimal one-action workflow job (start -> "one" -> end) into the
 * store, plus a completed "one" action marked pending with an OK signal so a
 * subsequent signal command can advance the workflow.
 *
 * @return the persisted workflow job bean
 */
private WorkflowJobBean _createWorkflowJob() throws Exception {
    LiteWorkflowApp app = new LiteWorkflowApp("my-app", "<workflow-app/>",
            new StartNodeDef(TestControlNodeHandler.class, "one"))
            .addNode(new ActionNodeDef("one", "<java></java>", TestActionNodeHandler.class, "end", "end"))
            .addNode(new EndNodeDef("end", TestControlNodeHandler.class));
    Configuration conf = new Configuration();
    Path appUri = new Path(getAppPath(), "workflow.xml");
    conf.set(OozieClient.APP_PATH, appUri.toString());
    conf.set(OozieClient.LOG_TOKEN, "testToken");
    conf.set(OozieClient.USER_NAME, getTestUser());
    WorkflowJobBean workflow = createWorkflow(app, conf, WorkflowJob.Status.PREP,
            WorkflowInstance.Status.PREP);
    String executionPath = "/";
    assertNotNull(jpaService);
    WorkflowJobInsertJPAExecutor wfInsertCmd = new WorkflowJobInsertJPAExecutor(workflow);
    jpaService.execute(wfInsertCmd);
    WorkflowActionBean wfAction = addRecordToWfActionTable(workflow.getId(), "one", WorkflowAction.Status.OK,
            executionPath, true);
    // Pending + OK signal value leaves the action ready to be signaled through.
    wfAction.setPending();
    wfAction.setSignalValue(WorkflowAction.Status.OK.name());
    WorkflowActionQueryExecutor.getInstance().executeUpdate(WorkflowActionQuery.UPDATE_ACTION, wfAction);
    return workflow;
}
/**
 * Rewrites the coordinator's embedded workflow app path to point at the given
 * (typically invalid) workflow XML so the materialized action will fail, then
 * persists the updated job XML.
 *
 * @param coord the coordinator job to modify (its job XML is updated in place)
 * @param resourceXml classpath resource containing the workflow definition to use
 */
private void _modifyCoordForFailureAction(CoordinatorJobBean coord, String resourceXml) throws Exception {
    String wfXml = IOUtils.getResourceAsString(resourceXml, -1);
    writeToFile(wfXml, getFsTestCaseDir(), "workflow.xml");
    String coordXml = coord.getJobXml();
    // Replace the placeholder app-path directory with the concrete test-case file.
    coord.setJobXml(coordXml.replace("hdfs:///tmp/workflows/", getFsTestCaseDir() + "/workflow.xml"));
    CoordJobQueryExecutor.getInstance().executeUpdate(CoordJobQuery.UPDATE_COORD_JOB, coord);
}
/**
 * Inserts a PREP java workflow action for the given workflow.  The action
 * configuration XML is deliberately malformed (the {@code <main-class>} element
 * is never closed), so starting this action fails -- relied upon by
 * {@code testInvalidXMLCoordinatorFailsForNoDuplicates} to trigger failure events.
 *
 * @param wfId id of the parent workflow job
 * @param actionName name used to derive the child action id
 * @return the id of the inserted action
 */
private String _createWorkflowAction(String wfId, String actionName) throws JPAExecutorException {
    WorkflowActionBean action = new WorkflowActionBean();
    action.setName(actionName);
    action.setId(Services.get().get(UUIDService.class).generateChildId(wfId, actionName));
    action.setJobId(wfId);
    action.setType("java");
    action.setTransition("transition");
    action.setStatus(WorkflowAction.Status.PREP);
    action.setStartTime(new Date());
    action.setEndTime(new Date());
    action.setLastCheckTime(new Date());
    action.setPendingOnly();
    // NOTE: intentionally invalid XML -- <main-class> is opened but never closed.
    String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
            + getNameNodeUri() + "</name-node>" + "<main-class>" + "${dummy}" + "</java>";
    action.setConf(actionXml);
    jpaService.execute(new WorkflowActionInsertJPAExecutor(action));
    return action.getId();
}
/**
 * Blocks (up to {@code wait} ms) until at least one event appears on the
 * event handler service's queue.
 *
 * @param wait maximum time to wait, in milliseconds
 */
private void waitForEventGeneration(int wait) {
    waitFor(wait, new Predicate() {
        @Override
        public boolean evaluate() throws Exception {
            return ehs.getEventQueue().peek() != null;
        }
    });
}
}
|
googleapis/google-cloud-java | 36,045 | java-notebooks/proto-google-cloud-notebooks-v1/src/main/java/com/google/cloud/notebooks/v1/VirtualMachine.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/notebooks/v1/runtime.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.notebooks.v1;
/**
*
*
* <pre>
* Runtime using Virtual Machine for computing.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v1.VirtualMachine}
*/
public final class VirtualMachine extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.notebooks.v1.VirtualMachine)
VirtualMachineOrBuilder {
private static final long serialVersionUID = 0L;
// Use VirtualMachine.newBuilder() to construct.
private VirtualMachine(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private VirtualMachine() {
instanceName_ = "";
instanceId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new VirtualMachine();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_VirtualMachine_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_VirtualMachine_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v1.VirtualMachine.class,
com.google.cloud.notebooks.v1.VirtualMachine.Builder.class);
}
private int bitField0_;
public static final int INSTANCE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object instanceName_ = "";
/**
*
*
* <pre>
* Output only. The user-friendly name of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The instanceName.
*/
@java.lang.Override
public java.lang.String getInstanceName() {
java.lang.Object ref = instanceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The user-friendly name of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for instanceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getInstanceNameBytes() {
java.lang.Object ref = instanceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INSTANCE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object instanceId_ = "";
/**
*
*
* <pre>
* Output only. The unique identifier of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The instanceId.
*/
@java.lang.Override
public java.lang.String getInstanceId() {
java.lang.Object ref = instanceId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The unique identifier of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for instanceId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getInstanceIdBytes() {
java.lang.Object ref = instanceId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VIRTUAL_MACHINE_CONFIG_FIELD_NUMBER = 3;
private com.google.cloud.notebooks.v1.VirtualMachineConfig virtualMachineConfig_;
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*
* @return Whether the virtualMachineConfig field is set.
*/
@java.lang.Override
public boolean hasVirtualMachineConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*
* @return The virtualMachineConfig.
*/
@java.lang.Override
public com.google.cloud.notebooks.v1.VirtualMachineConfig getVirtualMachineConfig() {
return virtualMachineConfig_ == null
? com.google.cloud.notebooks.v1.VirtualMachineConfig.getDefaultInstance()
: virtualMachineConfig_;
}
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
@java.lang.Override
public com.google.cloud.notebooks.v1.VirtualMachineConfigOrBuilder
getVirtualMachineConfigOrBuilder() {
return virtualMachineConfig_ == null
? com.google.cloud.notebooks.v1.VirtualMachineConfig.getDefaultInstance()
: virtualMachineConfig_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 serialization: string fields are emitted only when non-empty; the
  // optional message field only when its presence bit (bitField0_ bit 0) is set.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceName_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceName_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(3, getVirtualMachineConfig());
  }
  // Unknown fields parsed from the wire are round-tripped unchanged.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // Mirrors writeTo(): only fields that would actually be written contribute.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceName_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceName_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(3, getVirtualMachineConfig());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.notebooks.v1.VirtualMachine)) {
    return super.equals(obj);
  }
  com.google.cloud.notebooks.v1.VirtualMachine other =
      (com.google.cloud.notebooks.v1.VirtualMachine) obj;
  if (!getInstanceName().equals(other.getInstanceName())) return false;
  if (!getInstanceId().equals(other.getInstanceId())) return false;
  // Optional message field: presence must match before contents are compared.
  if (hasVirtualMachineConfig() != other.hasVirtualMachineConfig()) return false;
  if (hasVirtualMachineConfig()) {
    if (!getVirtualMachineConfig().equals(other.getVirtualMachineConfig())) return false;
  }
  // Unknown fields participate in equality as well.
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Each field is mixed in with its field number so layouts stay distinguishable.
  hash = (37 * hash) + INSTANCE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getInstanceName().hashCode();
  hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER;
  hash = (53 * hash) + getInstanceId().hashCode();
  // Optional message field only contributes when present, matching equals().
  if (hasVirtualMachineConfig()) {
    hash = (37 * hash) + VIRTUAL_MACHINE_CONFIG_FIELD_NUMBER;
    hash = (53 * hash) + getVirtualMachineConfig().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.VirtualMachine parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.notebooks.v1.VirtualMachine prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Runtime using Virtual Machine for computing.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v1.VirtualMachine}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v1.VirtualMachine)
com.google.cloud.notebooks.v1.VirtualMachineOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_VirtualMachine_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_VirtualMachine_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v1.VirtualMachine.class,
com.google.cloud.notebooks.v1.VirtualMachine.Builder.class);
}
// Construct using com.google.cloud.notebooks.v1.VirtualMachine.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getVirtualMachineConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
instanceName_ = "";
instanceId_ = "";
virtualMachineConfig_ = null;
if (virtualMachineConfigBuilder_ != null) {
virtualMachineConfigBuilder_.dispose();
virtualMachineConfigBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_VirtualMachine_descriptor;
}
@java.lang.Override
public com.google.cloud.notebooks.v1.VirtualMachine getDefaultInstanceForType() {
return com.google.cloud.notebooks.v1.VirtualMachine.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.notebooks.v1.VirtualMachine build() {
com.google.cloud.notebooks.v1.VirtualMachine result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.notebooks.v1.VirtualMachine buildPartial() {
com.google.cloud.notebooks.v1.VirtualMachine result =
new com.google.cloud.notebooks.v1.VirtualMachine(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set builder fields into the message being built.  Builder bits 0/1
// track the two string fields; builder bit 2 tracks the config sub-message,
// which maps to the message's own presence bit 0.
private void buildPartial0(com.google.cloud.notebooks.v1.VirtualMachine result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.instanceName_ = instanceName_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.instanceId_ = instanceId_;
  }
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000004) != 0)) {
    // Prefer the nested builder's output when one exists.
    result.virtualMachineConfig_ =
        virtualMachineConfigBuilder_ == null
            ? virtualMachineConfig_
            : virtualMachineConfigBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.notebooks.v1.VirtualMachine) {
return mergeFrom((com.google.cloud.notebooks.v1.VirtualMachine) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another VirtualMachine into this builder.  Proto3 merge semantics:
// string fields are copied only when non-empty, and the sub-message is merged
// recursively rather than replaced.
public Builder mergeFrom(com.google.cloud.notebooks.v1.VirtualMachine other) {
  if (other == com.google.cloud.notebooks.v1.VirtualMachine.getDefaultInstance()) return this;
  if (!other.getInstanceName().isEmpty()) {
    instanceName_ = other.instanceName_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getInstanceId().isEmpty()) {
    instanceId_ = other.instanceId_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (other.hasVirtualMachineConfig()) {
    mergeVirtualMachineConfig(other.getVirtualMachineConfig());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      // Wire tag is (field_number << 3) | wire_type; tag 0 marks end of input.
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            // Field 1 (instance_name), length-delimited string.
            instanceName_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // Field 2 (instance_id), length-delimited string.
            instanceId_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 26:
          {
            // Field 3 (virtual_machine_config), length-delimited sub-message.
            input.readMessage(
                getVirtualMachineConfigFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            // Unrecognized tags are preserved as unknown fields.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on partial parse, so state stays consistent.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object instanceName_ = "";
/**
*
*
* <pre>
* Output only. The user-friendly name of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The instanceName.
*/
public java.lang.String getInstanceName() {
java.lang.Object ref = instanceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. The user-friendly name of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for instanceName.
*/
public com.google.protobuf.ByteString getInstanceNameBytes() {
java.lang.Object ref = instanceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. The user-friendly name of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The instanceName to set.
* @return This builder for chaining.
*/
public Builder setInstanceName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
instanceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The user-friendly name of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearInstanceName() {
instanceName_ = getDefaultInstance().getInstanceName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The user-friendly name of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for instanceName to set.
* @return This builder for chaining.
*/
public Builder setInstanceNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
instanceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object instanceId_ = "";
/**
*
*
* <pre>
* Output only. The unique identifier of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The instanceId.
*/
public java.lang.String getInstanceId() {
java.lang.Object ref = instanceId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. The unique identifier of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for instanceId.
*/
public com.google.protobuf.ByteString getInstanceIdBytes() {
java.lang.Object ref = instanceId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. The unique identifier of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The instanceId to set.
* @return This builder for chaining.
*/
public Builder setInstanceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
instanceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The unique identifier of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearInstanceId() {
instanceId_ = getDefaultInstance().getInstanceId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The unique identifier of the Managed Compute Engine instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for instanceId to set.
* @return This builder for chaining.
*/
    public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject byte sequences that are not valid UTF-8 (proto3 string contract).
      checkByteStringIsUtf8(value);
      instanceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Backing storage for virtual_machine_config (field 3). The nested builder is
    // created lazily by getVirtualMachineConfigFieldBuilder(); while it is null,
    // the plain message reference below is the authoritative value.
    private com.google.cloud.notebooks.v1.VirtualMachineConfig virtualMachineConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.notebooks.v1.VirtualMachineConfig,
            com.google.cloud.notebooks.v1.VirtualMachineConfig.Builder,
            com.google.cloud.notebooks.v1.VirtualMachineConfigOrBuilder>
        virtualMachineConfigBuilder_;
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*
* @return Whether the virtualMachineConfig field is set.
*/
    public boolean hasVirtualMachineConfig() {
      // has-bit 0x4 tracks whether virtual_machine_config was explicitly set
      return ((bitField0_ & 0x00000004) != 0);
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*
* @return The virtualMachineConfig.
*/
    public com.google.cloud.notebooks.v1.VirtualMachineConfig getVirtualMachineConfig() {
      if (virtualMachineConfigBuilder_ == null) {
        // No nested builder allocated: return the stored message, or the default
        // instance when the field is unset.
        return virtualMachineConfig_ == null
            ? com.google.cloud.notebooks.v1.VirtualMachineConfig.getDefaultInstance()
            : virtualMachineConfig_;
      } else {
        return virtualMachineConfigBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
    // Replaces the whole virtual_machine_config message with the given value.
    public Builder setVirtualMachineConfig(
        com.google.cloud.notebooks.v1.VirtualMachineConfig value) {
      if (virtualMachineConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        virtualMachineConfig_ = value;
      } else {
        virtualMachineConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
    // Replaces virtual_machine_config with the message built from the given builder.
    public Builder setVirtualMachineConfig(
        com.google.cloud.notebooks.v1.VirtualMachineConfig.Builder builderForValue) {
      if (virtualMachineConfigBuilder_ == null) {
        virtualMachineConfig_ = builderForValue.build();
      } else {
        virtualMachineConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
    public Builder mergeVirtualMachineConfig(
        com.google.cloud.notebooks.v1.VirtualMachineConfig value) {
      if (virtualMachineConfigBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && virtualMachineConfig_ != null
            && virtualMachineConfig_
                != com.google.cloud.notebooks.v1.VirtualMachineConfig.getDefaultInstance()) {
          // A non-default value is already present: merge field-by-field.
          getVirtualMachineConfigBuilder().mergeFrom(value);
        } else {
          virtualMachineConfig_ = value;
        }
      } else {
        virtualMachineConfigBuilder_.mergeFrom(value);
      }
      if (virtualMachineConfig_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
    public Builder clearVirtualMachineConfig() {
      bitField0_ = (bitField0_ & ~0x00000004);
      virtualMachineConfig_ = null;
      if (virtualMachineConfigBuilder_ != null) {
        // Release the nested builder so a fresh one is created on next access.
        virtualMachineConfigBuilder_.dispose();
        virtualMachineConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
    public com.google.cloud.notebooks.v1.VirtualMachineConfig.Builder
        getVirtualMachineConfigBuilder() {
      // Marks the field as set and switches to builder-backed storage (the field
      // builder is created lazily by getVirtualMachineConfigFieldBuilder()).
      bitField0_ |= 0x00000004;
      onChanged();
      return getVirtualMachineConfigFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
    public com.google.cloud.notebooks.v1.VirtualMachineConfigOrBuilder
        getVirtualMachineConfigOrBuilder() {
      if (virtualMachineConfigBuilder_ != null) {
        return virtualMachineConfigBuilder_.getMessageOrBuilder();
      } else {
        // No builder allocated: return the stored message or the default instance.
        return virtualMachineConfig_ == null
            ? com.google.cloud.notebooks.v1.VirtualMachineConfig.getDefaultInstance()
            : virtualMachineConfig_;
      }
    }
/**
*
*
* <pre>
* Virtual Machine configuration settings.
* </pre>
*
* <code>.google.cloud.notebooks.v1.VirtualMachineConfig virtual_machine_config = 3;</code>
*/
    // Lazily creates the single-field builder; once created, the plain message
    // reference is nulled out and the builder becomes the authoritative storage.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.notebooks.v1.VirtualMachineConfig,
            com.google.cloud.notebooks.v1.VirtualMachineConfig.Builder,
            com.google.cloud.notebooks.v1.VirtualMachineConfigOrBuilder>
        getVirtualMachineConfigFieldBuilder() {
      if (virtualMachineConfigBuilder_ == null) {
        virtualMachineConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.notebooks.v1.VirtualMachineConfig,
                com.google.cloud.notebooks.v1.VirtualMachineConfig.Builder,
                com.google.cloud.notebooks.v1.VirtualMachineConfigOrBuilder>(
                getVirtualMachineConfig(), getParentForChildren(), isClean());
        virtualMachineConfig_ = null;
      }
      return virtualMachineConfigBuilder_;
    }
    // Standard generated pass-through to GeneratedMessageV3.Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Standard generated pass-through to GeneratedMessageV3.Builder.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v1.VirtualMachine)
}
// @@protoc_insertion_point(class_scope:google.cloud.notebooks.v1.VirtualMachine)
  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.notebooks.v1.VirtualMachine DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.notebooks.v1.VirtualMachine();
  }
  public static com.google.cloud.notebooks.v1.VirtualMachine getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<VirtualMachine> PARSER =
      new com.google.protobuf.AbstractParser<VirtualMachine>() {
        @java.lang.Override
        public VirtualMachine parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<VirtualMachine> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<VirtualMachine> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.notebooks.v1.VirtualMachine getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/flink | 36,306 | flink-runtime/src/main/java/org/apache/flink/runtime/util/ZooKeeperUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.util;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.SecurityOptions;
import org.apache.flink.core.execution.RecoveryClaimMode;
import org.apache.flink.runtime.checkpoint.CompletedCheckpoint;
import org.apache.flink.runtime.checkpoint.CompletedCheckpointStore;
import org.apache.flink.runtime.checkpoint.DefaultCompletedCheckpointStore;
import org.apache.flink.runtime.checkpoint.DefaultCompletedCheckpointStoreUtils;
import org.apache.flink.runtime.checkpoint.DefaultLastStateConnectionStateListener;
import org.apache.flink.runtime.checkpoint.ZooKeeperCheckpointIDCounter;
import org.apache.flink.runtime.checkpoint.ZooKeeperCheckpointStoreUtil;
import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils;
import org.apache.flink.runtime.highavailability.zookeeper.CuratorFrameworkWithUnhandledErrorListener;
import org.apache.flink.runtime.jobmanager.DefaultExecutionPlanStore;
import org.apache.flink.runtime.jobmanager.ExecutionPlanStore;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
import org.apache.flink.runtime.jobmanager.ZooKeeperExecutionPlanStoreUtil;
import org.apache.flink.runtime.jobmanager.ZooKeeperExecutionPlanStoreWatcher;
import org.apache.flink.runtime.leaderelection.LeaderInformation;
import org.apache.flink.runtime.leaderretrieval.DefaultLeaderRetrievalService;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalDriverFactory;
import org.apache.flink.runtime.leaderretrieval.ZooKeeperLeaderRetrievalDriver;
import org.apache.flink.runtime.leaderretrieval.ZooKeeperLeaderRetrievalDriverFactory;
import org.apache.flink.runtime.persistence.RetrievableStateStorageHelper;
import org.apache.flink.runtime.persistence.filesystem.FileSystemStateStorageHelper;
import org.apache.flink.runtime.rpc.FatalErrorHandler;
import org.apache.flink.runtime.state.SharedStateRegistryFactory;
import org.apache.flink.runtime.zookeeper.ZooKeeperStateHandleStore;
import org.apache.flink.streaming.api.graph.ExecutionPlan;
import org.apache.flink.util.concurrent.Executors;
import org.apache.flink.util.function.RunnableWithException;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.AuthInfo;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.CuratorFramework;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.api.ACLProvider;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.api.UnhandledErrorListener;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.imps.DefaultACLProvider;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.recipes.cache.PathChildrenCache;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.recipes.cache.TreeCache;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.recipes.cache.TreeCacheListener;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.recipes.cache.TreeCacheSelector;
import org.apache.flink.shaded.curator5.org.apache.curator.framework.state.SessionConnectionStateErrorPolicy;
import org.apache.flink.shaded.curator5.org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.CreateMode;
import org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.KeeperException;
import org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.ZooDefs;
import org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.data.ACL;
import org.apache.flink.shaded.zookeeper3.org.apache.zookeeper.data.Stat;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.Executor;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** Class containing helper functions to interact with ZooKeeper. */
public class ZooKeeperUtils {
    private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperUtils.class);
    /** The prefix of the submitted job graph file. */
    public static final String HA_STORAGE_SUBMITTED_EXECUTION_PLAN_PREFIX =
            "submittedExecutionPlan";
    /** The prefix of the completed checkpoint file. */
    public static final String HA_STORAGE_COMPLETED_CHECKPOINT = "completedCheckpoint";
    /** The prefix of the resource manager node. */
    public static final String RESOURCE_MANAGER_NODE = "resource_manager";
    /** ZooKeeper node name for the dispatcher component. */
    private static final String DISPATCHER_NODE = "dispatcher";
    /** ZooKeeper node name under which leader information is stored. */
    private static final String LEADER_NODE = "leader";
    /** ZooKeeper node name for the REST server component. */
    private static final String REST_SERVER_NODE = "rest_server";
    /** ZooKeeper node name used for the leader-election latch. */
    private static final String LEADER_LATCH_NODE = "latch";
    /** ZooKeeper node name that stores the leader's connection information. */
    private static final String CONNECTION_INFO_NODE = "connection_info";
    /** Returns the ZooKeeper path {@code /jobs/<job-id>} for the given job. */
    public static String getLeaderPathForJob(JobID jobId) {
        return generateZookeeperPath(getJobsPath(), getPathForJob(jobId));
    }
    /** Returns the root path ({@code /jobs}) under which per-job nodes are stored. */
    public static String getJobsPath() {
        return "/jobs";
    }
    /** Returns the relative checkpoints node path ({@code /checkpoints}). */
    private static String getCheckpointsPath() {
        return "/checkpoints";
    }
    /** Returns the relative checkpoint ID counter node path ({@code /checkpoint_id_counter}). */
    public static String getCheckpointIdCounterPath() {
        return "/checkpoint_id_counter";
    }
    /** Returns the path of the leader node ({@code /leader}). */
    public static String getLeaderPath() {
        return generateZookeeperPath(LEADER_NODE);
    }
    /** Returns the node name used for the dispatcher component. */
    public static String getDispatcherNode() {
        return DISPATCHER_NODE;
    }
    /** Returns the node name used for the resource manager component. */
    public static String getResourceManagerNode() {
        return RESOURCE_MANAGER_NODE;
    }
    /** Returns the node name used for the REST server component. */
    public static String getRestServerNode() {
        return REST_SERVER_NODE;
    }
    /** Returns the path of the leader latch node ({@code /latch}). */
    public static String getLeaderLatchPath() {
        return generateZookeeperPath(LEADER_LATCH_NODE);
    }
    /** Returns the leader path ({@code /leader/<suffix>}) for the given suffix. */
    public static String getLeaderPath(String suffix) {
        return generateZookeeperPath(LEADER_NODE, suffix);
    }
    /** Appends the connection information node name to the given path. */
    public static String generateConnectionInformationPath(String path) {
        return generateZookeeperPath(path, CONNECTION_INFO_NODE);
    }
    /** Returns whether the given path points to a connection information node. */
    public static boolean isConnectionInfoPath(String path) {
        return path.endsWith(CONNECTION_INFO_NODE);
    }
    /** Appends the leader latch node name to the given path. */
    public static String generateLeaderLatchPath(String path) {
        return generateZookeeperPath(path, LEADER_LATCH_NODE);
    }
    /**
     * Starts a {@link CuratorFramework} instance and connects it to the given ZooKeeper quorum.
     *
     * @param configuration {@link Configuration} object containing the configuration values
     * @param fatalErrorHandler {@link FatalErrorHandler} fatalErrorHandler to handle unexpected
     *     errors of {@link CuratorFramework}
     * @return {@link CuratorFrameworkWithUnhandledErrorListener} instance
     * @throws RuntimeException if no ZooKeeper quorum is configured
     * @throws IllegalConfigurationException if the ACL/SASL settings are contradictory or a
     *     timeout value is out of range
     */
    public static CuratorFrameworkWithUnhandledErrorListener startCuratorFramework(
            Configuration configuration, FatalErrorHandler fatalErrorHandler) {
        checkNotNull(configuration, "configuration");
        String zkQuorum = configuration.getValue(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM);
        if (zkQuorum == null || StringUtils.isBlank(zkQuorum)) {
            throw new RuntimeException(
                    "No valid ZooKeeper quorum has been specified. "
                            + "You can specify the quorum via the configuration key '"
                            + HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM.key()
                            + "'.");
        }
        int sessionTimeout =
                Math.toIntExact(
                        configuration
                                .get(HighAvailabilityOptions.ZOOKEEPER_SESSION_TIMEOUT)
                                .toMillis());
        int connectionTimeout =
                Math.toIntExact(
                        configuration
                                .get(HighAvailabilityOptions.ZOOKEEPER_CONNECTION_TIMEOUT)
                                .toMillis());
        int retryWait =
                Math.toIntExact(
                        configuration.get(HighAvailabilityOptions.ZOOKEEPER_RETRY_WAIT).toMillis());
        int maxRetryAttempts =
                configuration.get(HighAvailabilityOptions.ZOOKEEPER_MAX_RETRY_ATTEMPTS);
        String root = configuration.getValue(HighAvailabilityOptions.HA_ZOOKEEPER_ROOT);
        String namespace = configuration.getValue(HighAvailabilityOptions.HA_CLUSTER_ID);
        boolean disableSaslClient = configuration.get(SecurityOptions.ZOOKEEPER_SASL_DISABLE);
        ACLProvider aclProvider;
        ZkClientACLMode aclMode = ZkClientACLMode.fromConfig(configuration);
        // The CREATOR ACL mode requires an authenticated (SASL) client; reject the
        // contradictory combination early.
        if (disableSaslClient && aclMode == ZkClientACLMode.CREATOR) {
            String errorMessage =
                    "Cannot set ACL role to "
                            + ZkClientACLMode.CREATOR
                            + " since SASL authentication is "
                            + "disabled through the "
                            + SecurityOptions.ZOOKEEPER_SASL_DISABLE.key()
                            + " property";
            LOG.warn(errorMessage);
            throw new IllegalConfigurationException(errorMessage);
        }
        if (aclMode == ZkClientACLMode.CREATOR) {
            LOG.info("Enforcing creator for ZK connections");
            aclProvider = new SecureAclProvider();
        } else {
            LOG.info("Enforcing default ACL for ZK connections");
            aclProvider = new DefaultACLProvider();
        }
        String rootWithNamespace = generateZookeeperPath(root, namespace);
        LOG.info("Using '{}' as Zookeeper namespace.", rootWithNamespace);
        boolean ensembleTracking =
                configuration.get(HighAvailabilityOptions.ZOOKEEPER_ENSEMBLE_TRACKING);
        final CuratorFrameworkFactory.Builder curatorFrameworkBuilder =
                CuratorFrameworkFactory.builder()
                        .connectString(zkQuorum)
                        .sessionTimeoutMs(sessionTimeout)
                        .connectionTimeoutMs(connectionTimeout)
                        .retryPolicy(new ExponentialBackoffRetry(retryWait, maxRetryAttempts))
                        // Curator prepends a '/' manually and throws an Exception if the
                        // namespace starts with a '/'.
                        .namespace(trimStartingSlash(rootWithNamespace))
                        .ensembleTracker(ensembleTracking)
                        .aclProvider(aclProvider);
        // Optional: digest/SASL authorization entries (scheme -> auth payload).
        if (configuration.contains(HighAvailabilityOptions.ZOOKEEPER_CLIENT_AUTHORIZATION)) {
            Map<String, String> authMap =
                    configuration.get(HighAvailabilityOptions.ZOOKEEPER_CLIENT_AUTHORIZATION);
            List<AuthInfo> authInfos =
                    authMap.entrySet().stream()
                            .map(
                                    entry ->
                                            new AuthInfo(
                                                    entry.getKey(),
                                                    entry.getValue()
                                                            .getBytes(
                                                                    ConfigConstants
                                                                            .DEFAULT_CHARSET)))
                            .collect(Collectors.toList());
            curatorFrameworkBuilder.authorization(authInfos);
        }
        // Optional: bound how long close() waits; Curator takes an int, so validate range.
        if (configuration.contains(HighAvailabilityOptions.ZOOKEEPER_MAX_CLOSE_WAIT)) {
            long maxCloseWait =
                    configuration.get(HighAvailabilityOptions.ZOOKEEPER_MAX_CLOSE_WAIT).toMillis();
            if (maxCloseWait < 0 || maxCloseWait > Integer.MAX_VALUE) {
                throw new IllegalConfigurationException(
                        "The value (%d ms) is out-of-range for %s. The milliseconds timeout is expected to be between 0 and %d ms.",
                        maxCloseWait,
                        HighAvailabilityOptions.ZOOKEEPER_MAX_CLOSE_WAIT.key(),
                        Integer.MAX_VALUE);
            }
            curatorFrameworkBuilder.maxCloseWaitMs((int) maxCloseWait);
        }
        if (configuration.contains(
                HighAvailabilityOptions.ZOOKEEPER_SIMULATED_SESSION_EXP_PERCENT)) {
            curatorFrameworkBuilder.simulatedSessionExpirationPercent(
                    configuration.get(
                            HighAvailabilityOptions.ZOOKEEPER_SIMULATED_SESSION_EXP_PERCENT));
        }
        if (configuration.get(HighAvailabilityOptions.ZOOKEEPER_TOLERATE_SUSPENDED_CONNECTIONS)) {
            curatorFrameworkBuilder.connectionStateErrorPolicy(
                    new SessionConnectionStateErrorPolicy());
        }
        return startCuratorFramework(curatorFrameworkBuilder, fatalErrorHandler);
    }
/**
* Starts a {@link CuratorFramework} instance and connects it to the given ZooKeeper quorum from
* a builder.
*
* @param builder {@link CuratorFrameworkFactory.Builder} A builder for curatorFramework.
* @param fatalErrorHandler {@link FatalErrorHandler} fatalErrorHandler to handle unexpected
* errors of {@link CuratorFramework}
* @return {@link CuratorFrameworkWithUnhandledErrorListener} instance
*/
@VisibleForTesting
public static CuratorFrameworkWithUnhandledErrorListener startCuratorFramework(
CuratorFrameworkFactory.Builder builder, FatalErrorHandler fatalErrorHandler) {
CuratorFramework cf = builder.build();
UnhandledErrorListener unhandledErrorListener =
(message, throwable) -> {
LOG.error(
"Unhandled error in curator framework, error message: {}",
message,
throwable);
// The exception thrown in UnhandledErrorListener will be caught by
// CuratorFramework. So we mostly trigger exit process or interact with main
// thread to inform the failure in FatalErrorHandler.
fatalErrorHandler.onFatalError(throwable);
};
cf.getUnhandledErrorListenable().addListener(unhandledErrorListener);
cf.start();
return new CuratorFrameworkWithUnhandledErrorListener(cf, unhandledErrorListener);
}
/** Returns whether {@link HighAvailabilityMode#ZOOKEEPER} is configured. */
public static boolean isZooKeeperRecoveryMode(Configuration flinkConf) {
return HighAvailabilityMode.fromConfig(flinkConf).equals(HighAvailabilityMode.ZOOKEEPER);
}
/**
* Returns the configured ZooKeeper quorum (and removes whitespace, because ZooKeeper does not
* tolerate it).
*/
public static String getZooKeeperEnsemble(Configuration flinkConf)
throws IllegalConfigurationException {
String zkQuorum = flinkConf.getValue(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM);
if (zkQuorum == null || StringUtils.isBlank(zkQuorum)) {
throw new IllegalConfigurationException("No ZooKeeper quorum specified in config.");
}
// Remove all whitespace
zkQuorum = zkQuorum.replaceAll("\\s+", "");
return zkQuorum;
}
    /**
     * Creates a {@link DefaultLeaderRetrievalService} instance with {@link
     * ZooKeeperLeaderRetrievalDriver}.
     *
     * @param client The {@link CuratorFramework} ZooKeeper client to use
     * @return {@link DefaultLeaderRetrievalService} instance.
     */
    public static DefaultLeaderRetrievalService createLeaderRetrievalService(
            final CuratorFramework client) {
        // Delegates with an empty path and default configuration.
        return createLeaderRetrievalService(client, "", new Configuration());
    }
    /**
     * Creates a {@link DefaultLeaderRetrievalService} instance with {@link
     * ZooKeeperLeaderRetrievalDriver}.
     *
     * @param client The {@link CuratorFramework} ZooKeeper client to use
     * @param path The path for the leader retrieval
     * @param configuration configuration for further config options
     * @return {@link DefaultLeaderRetrievalService} instance.
     */
    public static DefaultLeaderRetrievalService createLeaderRetrievalService(
            final CuratorFramework client, final String path, final Configuration configuration) {
        // The service owns a ZooKeeper-backed retrieval driver factory.
        return new DefaultLeaderRetrievalService(
                createLeaderRetrievalDriverFactory(client, path, configuration));
    }
    /**
     * Creates a {@link LeaderRetrievalDriverFactory} implemented by ZooKeeper.
     *
     * @param client The {@link CuratorFramework} ZooKeeper client to use
     * @return {@link LeaderRetrievalDriverFactory} instance.
     */
    public static ZooKeeperLeaderRetrievalDriverFactory createLeaderRetrievalDriverFactory(
            final CuratorFramework client) {
        // Delegates with an empty path.
        return createLeaderRetrievalDriverFactory(client, "");
    }
    /**
     * Creates a {@link LeaderRetrievalDriverFactory} implemented by ZooKeeper.
     *
     * @param client The {@link CuratorFramework} ZooKeeper client to use
     * @param path The parent path that shall be used by the client.
     * @return {@link LeaderRetrievalDriverFactory} instance.
     */
    public static ZooKeeperLeaderRetrievalDriverFactory createLeaderRetrievalDriverFactory(
            final CuratorFramework client, String path) {
        // Delegates with a default configuration.
        return createLeaderRetrievalDriverFactory(client, path, new Configuration());
    }
/**
* Creates a {@link LeaderRetrievalDriverFactory} implemented by ZooKeeper.
*
* @param client The {@link CuratorFramework} ZooKeeper client to use
* @param path The path for the leader zNode
* @param configuration configuration for further config options
* @return {@link LeaderRetrievalDriverFactory} instance.
*/
public static ZooKeeperLeaderRetrievalDriverFactory createLeaderRetrievalDriverFactory(
final CuratorFramework client, final String path, final Configuration configuration) {
final ZooKeeperLeaderRetrievalDriver.LeaderInformationClearancePolicy
leaderInformationClearancePolicy;
if (configuration.get(HighAvailabilityOptions.ZOOKEEPER_TOLERATE_SUSPENDED_CONNECTIONS)) {
leaderInformationClearancePolicy =
ZooKeeperLeaderRetrievalDriver.LeaderInformationClearancePolicy
.ON_LOST_CONNECTION;
} else {
leaderInformationClearancePolicy =
ZooKeeperLeaderRetrievalDriver.LeaderInformationClearancePolicy
.ON_SUSPENDED_CONNECTION;
}
return new ZooKeeperLeaderRetrievalDriverFactory(
client, path, leaderInformationClearancePolicy);
}
public static void writeLeaderInformationToZooKeeper(
LeaderInformation leaderInformation,
CuratorFramework curatorFramework,
BooleanSupplier hasLeadershipCheck,
String connectionInformationPath)
throws Exception {
final byte[] data;
if (leaderInformation.isEmpty()) {
data = null;
} else {
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
final ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeUTF(leaderInformation.getLeaderAddress());
oos.writeObject(leaderInformation.getLeaderSessionID());
oos.close();
data = baos.toByteArray();
}
boolean dataWritten = false;
while (!dataWritten && hasLeadershipCheck.getAsBoolean()) {
Stat stat = curatorFramework.checkExists().forPath(connectionInformationPath);
if (stat != null) {
long owner = stat.getEphemeralOwner();
long sessionID =
curatorFramework.getZookeeperClient().getZooKeeper().getSessionId();
if (owner == sessionID) {
try {
curatorFramework.setData().forPath(connectionInformationPath, data);
dataWritten = true;
} catch (KeeperException.NoNodeException noNode) {
// node was deleted in the meantime
}
} else {
try {
curatorFramework.delete().forPath(connectionInformationPath);
} catch (KeeperException.NoNodeException noNode) {
// node was deleted in the meantime --> try again
}
}
} else {
try {
curatorFramework
.create()
.creatingParentsIfNeeded()
.withMode(CreateMode.EPHEMERAL)
.forPath(connectionInformationPath, data);
dataWritten = true;
} catch (KeeperException.NodeExistsException nodeExists) {
// node has been created in the meantime --> try again
}
}
}
}
public static LeaderInformation readLeaderInformation(byte[] data)
throws IOException, ClassNotFoundException {
if (data != null && data.length > 0) {
final ByteArrayInputStream bais = new ByteArrayInputStream(data);
final String leaderAddress;
final UUID leaderSessionID;
try (final ObjectInputStream ois = new ObjectInputStream(bais)) {
leaderAddress = ois.readUTF();
leaderSessionID = (UUID) ois.readObject();
}
return LeaderInformation.known(leaderSessionID, leaderAddress);
} else {
return LeaderInformation.empty();
}
}
    /**
     * Creates a {@link DefaultExecutionPlanStore} instance with {@link ZooKeeperStateHandleStore},
     * {@link ZooKeeperExecutionPlanStoreWatcher} and {@link ZooKeeperExecutionPlanStoreUtil}.
     *
     * @param client The {@link CuratorFramework} ZooKeeper client to use
     * @param configuration {@link Configuration} object
     * @return {@link DefaultExecutionPlanStore} instance
     * @throws Exception if the submitted execution plan store cannot be created
     */
    public static ExecutionPlanStore createExecutionPlans(
            CuratorFramework client, Configuration configuration) throws Exception {
        checkNotNull(configuration, "Configuration");
        // Plans themselves are persisted to the file system; ZooKeeper only keeps handles.
        RetrievableStateStorageHelper<ExecutionPlan> stateStorage =
                createFileSystemStateStorage(
                        configuration, HA_STORAGE_SUBMITTED_EXECUTION_PLAN_PREFIX);
        // ZooKeeper submitted jobs root dir
        String zooKeeperJobsPath =
                configuration.get(HighAvailabilityOptions.HA_ZOOKEEPER_EXECUTION_PLANS_PATH);
        // Ensure that the job graphs path exists
        client.newNamespaceAwareEnsurePath(zooKeeperJobsPath).ensure(client.getZookeeperClient());
        // All operations will have the path as root
        CuratorFramework facade = client.usingNamespace(client.getNamespace() + zooKeeperJobsPath);
        final ZooKeeperStateHandleStore<ExecutionPlan> zooKeeperStateHandleStore =
                new ZooKeeperStateHandleStore<>(facade, stateStorage);
        // 'false': only observe child-node events, do not cache node payloads.
        final PathChildrenCache pathCache = new PathChildrenCache(facade, "/", false);
        return new DefaultExecutionPlanStore<>(
                zooKeeperStateHandleStore,
                new ZooKeeperExecutionPlanStoreWatcher(pathCache),
                ZooKeeperExecutionPlanStoreUtil.INSTANCE);
    }
    /**
     * Creates a {@link DefaultCompletedCheckpointStore} instance with {@link
     * ZooKeeperStateHandleStore}.
     *
     * @param client The {@link CuratorFramework} ZooKeeper client to use
     * @param configuration {@link Configuration} object
     * @param maxNumberOfCheckpointsToRetain The maximum number of checkpoints to retain
     * @param sharedStateRegistryFactory factory for the shared state registry
     * @param ioExecutor executor used by the shared state registry
     * @param executor to run ZooKeeper callbacks
     * @param recoveryClaimMode the mode in which the job is being restored
     * @return {@link DefaultCompletedCheckpointStore} instance
     * @throws Exception if the completed checkpoint store cannot be created
     */
    public static CompletedCheckpointStore createCompletedCheckpoints(
            CuratorFramework client,
            Configuration configuration,
            int maxNumberOfCheckpointsToRetain,
            SharedStateRegistryFactory sharedStateRegistryFactory,
            Executor ioExecutor,
            Executor executor,
            RecoveryClaimMode recoveryClaimMode)
            throws Exception {
        checkNotNull(configuration, "Configuration");
        // Checkpoint payloads live on the file system; ZooKeeper only stores handles.
        RetrievableStateStorageHelper<CompletedCheckpoint> stateStorage =
                createFileSystemStateStorage(configuration, HA_STORAGE_COMPLETED_CHECKPOINT);
        final ZooKeeperStateHandleStore<CompletedCheckpoint> completedCheckpointStateHandleStore =
                createZooKeeperStateHandleStore(client, getCheckpointsPath(), stateStorage);
        // Recover any checkpoints already registered in ZooKeeper from a previous run.
        Collection<CompletedCheckpoint> completedCheckpoints =
                DefaultCompletedCheckpointStoreUtils.retrieveCompletedCheckpoints(
                        completedCheckpointStateHandleStore, ZooKeeperCheckpointStoreUtil.INSTANCE);
        final CompletedCheckpointStore zooKeeperCompletedCheckpointStore =
                new DefaultCompletedCheckpointStore<>(
                        maxNumberOfCheckpointsToRetain,
                        completedCheckpointStateHandleStore,
                        ZooKeeperCheckpointStoreUtil.INSTANCE,
                        completedCheckpoints,
                        sharedStateRegistryFactory.create(
                                ioExecutor, completedCheckpoints, recoveryClaimMode),
                        executor);
        LOG.info(
                "Initialized {} in '{}' with {}.",
                DefaultCompletedCheckpointStore.class.getSimpleName(),
                completedCheckpointStateHandleStore,
                getCheckpointsPath());
        return zooKeeperCompletedCheckpointStore;
    }
/** Returns the JobID as a String (with leading slash). */
public static String getPathForJob(JobID jobId) {
checkNotNull(jobId, "Job ID");
return String.format("/%s", jobId);
}
    /**
     * Creates an instance of {@link ZooKeeperStateHandleStore}.
     *
     * @param client ZK client
     * @param path Path to use for the client namespace
     * @param stateStorage RetrievableStateStorageHelper that persist the actual state and whose
     *     returned state handle is then written to ZooKeeper
     * @param <T> Type of state
     * @return {@link ZooKeeperStateHandleStore} instance
     * @throws Exception ZK errors
     */
    public static <T extends Serializable>
            ZooKeeperStateHandleStore<T> createZooKeeperStateHandleStore(
                    final CuratorFramework client,
                    final String path,
                    final RetrievableStateStorageHelper<T> stateStorage)
                    throws Exception {
        // Scope the store to 'path' and make sure all intermediate nodes exist.
        return new ZooKeeperStateHandleStore<>(
                useNamespaceAndEnsurePath(client, path), stateStorage);
    }
    /**
     * Creates a {@link ZooKeeperCheckpointIDCounter} instance.
     *
     * @param client The {@link CuratorFramework} ZooKeeper client to use
     * @return {@link ZooKeeperCheckpointIDCounter} instance
     */
    public static ZooKeeperCheckpointIDCounter createCheckpointIDCounter(CuratorFramework client) {
        // The listener tracks the last observed connection state for the counter.
        return new ZooKeeperCheckpointIDCounter(
                client, new DefaultLastStateConnectionStateListener());
    }
    /**
     * Creates a {@link FileSystemStateStorageHelper} instance.
     *
     * @param configuration {@link Configuration} object
     * @param prefix Prefix for the created files
     * @param <T> Type of the state objects
     * @return {@link FileSystemStateStorageHelper} instance
     * @throws IOException if file system state storage cannot be created
     */
    public static <T extends Serializable>
            FileSystemStateStorageHelper<T> createFileSystemStateStorage(
                    Configuration configuration, String prefix) throws IOException {
        // State files are written below the cluster's configured HA storage directory.
        return new FileSystemStateStorageHelper<>(
                HighAvailabilityServicesUtils.getClusterHighAvailableStoragePath(configuration),
                prefix);
    }
/** Creates a ZooKeeper path of the form "/a/b/.../z". */
public static String generateZookeeperPath(String... paths) {
return Arrays.stream(paths)
.map(ZooKeeperUtils::trimSlashes)
.filter(s -> !s.isEmpty())
.collect(Collectors.joining("/", "/", ""));
}
/**
* Splits the given ZooKeeper path into its parts.
*
* @param path path to split
* @return splited path
*/
public static String[] splitZooKeeperPath(String path) {
return path.split("/");
}
public static String trimStartingSlash(String path) {
return path.startsWith("/") ? path.substring(1) : path;
}
private static String trimSlashes(String input) {
int left = 0;
int right = input.length() - 1;
while (left <= right && input.charAt(left) == '/') {
left++;
}
while (right >= left && input.charAt(right) == '/') {
right--;
}
if (left <= right) {
return input.substring(left, right + 1);
} else {
return "";
}
}
/**
* Returns a facade of the client that uses the specified namespace, and ensures that all nodes
* in the path exist.
*
* @param client ZK client
* @param path the new namespace
* @return ZK Client that uses the new namespace
* @throws Exception ZK errors
*/
public static CuratorFramework useNamespaceAndEnsurePath(
final CuratorFramework client, final String path) throws Exception {
checkNotNull(client, "client must not be null");
checkNotNull(path, "path must not be null");
// Ensure that the checkpoints path exists
client.newNamespaceAwareEnsurePath(path).ensure(client.getZookeeperClient());
// All operations will have the path as root
final String newNamespace = generateZookeeperPath(client.getNamespace(), path);
return client.usingNamespace(
// Curator prepends a '/' manually and throws an Exception if the
// namespace starts with a '/'.
trimStartingSlash(newNamespace));
}
/**
* Creates a {@link TreeCache} that only observes a specific node.
*
* @param client ZK client
* @param pathToNode full path of the node to observe
* @param nodeChangeCallback callback to run if the node has changed
* @return tree cache
*/
public static TreeCache createTreeCache(
final CuratorFramework client,
final String pathToNode,
final RunnableWithException nodeChangeCallback) {
final TreeCache cache =
createTreeCache(
client, pathToNode, ZooKeeperUtils.treeCacheSelectorForPath(pathToNode));
cache.getListenable().addListener(createTreeCacheListener(nodeChangeCallback));
return cache;
}
public static TreeCache createTreeCache(
final CuratorFramework client,
final String pathToNode,
final TreeCacheSelector selector) {
return TreeCache.newBuilder(client, pathToNode)
.setCacheData(true)
.setCreateParentNodes(false)
.setSelector(selector)
// see FLINK-32204 for further details on why the task rejection shouldn't
// be enforced here
.setExecutor(Executors.newDirectExecutorServiceWithNoOpShutdown())
.build();
}
@VisibleForTesting
static TreeCacheListener createTreeCacheListener(RunnableWithException nodeChangeCallback) {
return (ignored, event) -> {
// only notify listener if nodes have changed
// connection issues are handled separately from the cache
switch (event.getType()) {
case NODE_ADDED:
case NODE_UPDATED:
case NODE_REMOVED:
nodeChangeCallback.run();
}
};
}
/**
* Returns a {@link TreeCacheSelector} that only accepts a specific node.
*
* @param fullPath node to accept
* @return tree cache selector
*/
private static TreeCacheSelector treeCacheSelectorForPath(String fullPath) {
return new TreeCacheSelector() {
@Override
public boolean traverseChildren(String childPath) {
return false;
}
@Override
public boolean acceptChild(String childPath) {
return fullPath.equals(childPath);
}
};
}
/** Secure {@link ACLProvider} implementation. */
public static class SecureAclProvider implements ACLProvider {
@Override
public List<ACL> getDefaultAcl() {
return ZooDefs.Ids.CREATOR_ALL_ACL;
}
@Override
public List<ACL> getAclForPath(String path) {
return ZooDefs.Ids.CREATOR_ALL_ACL;
}
}
/** ZooKeeper client ACL mode enum. */
public enum ZkClientACLMode {
CREATOR,
OPEN;
/**
* Return the configured {@link ZkClientACLMode}.
*
* @param config The config to parse
* @return Configured ACL mode or the default defined by {@link
* HighAvailabilityOptions#ZOOKEEPER_CLIENT_ACL} if not configured.
*/
public static ZkClientACLMode fromConfig(Configuration config) {
String aclMode = config.get(HighAvailabilityOptions.ZOOKEEPER_CLIENT_ACL);
if (aclMode == null || aclMode.equalsIgnoreCase(OPEN.name())) {
return OPEN;
} else if (aclMode.equalsIgnoreCase(CREATOR.name())) {
return CREATOR;
} else {
String message = "Unsupported ACL option: [" + aclMode + "] provided";
LOG.error(message);
throw new IllegalConfigurationException(message);
}
}
}
    /**
     * Idempotently deletes the given znode and all of its children.
     *
     * @param curatorFramework ZK client
     * @param path znode to delete
     * @throws Exception ZK errors
     */
    public static void deleteZNode(CuratorFramework curatorFramework, String path)
            throws Exception {
        // idempotent(): a NoNodeException from a concurrent/previous delete is not an error
        curatorFramework.delete().idempotent().deletingChildrenIfNeeded().forPath(path);
    }
    /** Private constructor to prevent instantiation. */
    private ZooKeeperUtils() {
        // Utility class; also guards against reflective instantiation.
        throw new RuntimeException();
    }
}
|
apache/pulsar | 36,514 | pulsar-broker/src/test/java/org/apache/pulsar/broker/service/schema/KeyValueSchemaCompatibilityCheckTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.service.schema;
import java.util.HashMap;
import java.util.Map;
import lombok.Data;
import org.apache.pulsar.client.api.schema.SchemaDefinition;
import org.apache.pulsar.client.impl.schema.AvroSchema;
import org.apache.pulsar.client.impl.schema.JSONSchema;
import org.apache.pulsar.client.impl.schema.KeyValueSchemaImpl;
import org.apache.pulsar.client.impl.schema.StringSchema;
import org.apache.pulsar.common.policies.data.SchemaCompatibilityStrategy;
import org.apache.pulsar.common.protocol.schema.SchemaData;
import org.apache.pulsar.common.schema.SchemaType;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test(groups = "broker")
public class KeyValueSchemaCompatibilityCheckTest {
private final Map<SchemaType, SchemaCompatibilityCheck> checkers = new HashMap<>();
    /** Test POJO with several fields, including a nested {@link Bar}; used as the "wider" schema. */
    @Data
    private static class Foo {
        private String field1;
        private String field2;
        private int field3;
        private KeyValueSchemaCompatibilityCheckTest.Bar field4;
    }
    /** Minimal single-field test POJO; structurally incompatible with {@link Foo}. */
    @Data
    private static class Bar {
        private boolean field1;
    }
    /** Registers the per-type checkers; the KEY_VALUE checker delegates to this same map. */
    @BeforeClass
    protected void setup() {
        checkers.put(SchemaType.AVRO, new AvroSchemaCompatibilityCheck());
        checkers.put(SchemaType.JSON, new JsonSchemaCompatibilityCheck());
        checkers.put(SchemaType.KEY_VALUE, new KeyValueSchemaCompatibilityCheck(checkers));
    }
@Test
public void testCheckKeyValueAvroCompatibilityFull() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder()
.withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder()
.withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema).getSchemaInfo()
.getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema).getSchemaInfo()
.getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyValueAvroInCompatibilityFull() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema).getSchemaInfo()
.getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema).getSchemaInfo()
.getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyValueAvroCompatibilityBackward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema).getSchemaInfo()
.getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema).getSchemaInfo()
.getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyValueAvroInCompatibilityBackward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyValueAvroCompatibilityForward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyValueAvroInCompatibilityForward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyValueJsonCompatibilityFull() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyValueJsonInCompatibilityFull() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyValueJsonCompatibilityBackward() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyValueJsonInCompatibilityBackWard() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyValueJsonCompatibilityForward() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyValueJsonInCompatibilityForward() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyAvroValueJsonCompatibilityFull() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyAvroValueJsonInCompatibilityFull() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyAvroValueJsonCompatibilityBackward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyAvroValueJsonInCompatibilityBackward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyAvroValueJsonCompatibilityForward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyAvroValueJsonInCompatibilityForward() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
JSONSchema<Bar> barSchema = JSONSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
properties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyJsonValueAvroCompatibilityFull() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyJsonValueAvroInCompatibilityFull() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckKeyJsonValueAvroCompatibilityBackward() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyJsonValueAvroInCompatibilityBackward() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.BACKWARD));
}
@Test
public void testCheckKeyJsonValueAvroCompatibilityForward() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyJsonValueAvroInCompatibilityForward() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> properties = new HashMap<>();
properties.put("key.schema.type", String.valueOf(SchemaType.JSON));
properties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, fooSchema)
.getSchemaInfo().getSchema()).props(properties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(properties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyJsonValueAvroKeyTypeInCompatibility() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> fromProperties = new HashMap<>();
fromProperties.put("key.schema.type", String.valueOf(SchemaType.JSON));
fromProperties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
Map<String, String> toProperties = new HashMap<>();
toProperties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
toProperties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(fromProperties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(barSchema, barSchema)
.getSchemaInfo().getSchema()).props(toProperties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckKeyJsonValueAvroValueTypeInCompatibility() {
JSONSchema<Foo> fooSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> fromProperties = new HashMap<>();
fromProperties.put("key.schema.type", String.valueOf(SchemaType.JSON));
fromProperties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
Map<String, String> toProperties = new HashMap<>();
toProperties.put("key.schema.type", String.valueOf(SchemaType.JSON));
toProperties.put("value.schema.type", String.valueOf(SchemaType.JSON));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema).getSchemaInfo()
.getSchema()).props(fromProperties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, fooSchema).getSchemaInfo()
.getSchema()).props(toProperties).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FORWARD));
}
@Test
public void testCheckPropertiesNullTypeCompatibility() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> fromProperties = new HashMap<>();
fromProperties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
fromProperties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
Map<String, String> toProperties = new HashMap<>();
toProperties.put("key.schema.type", String.valueOf(SchemaType.AVRO));
toProperties.put("value.schema.type", String.valueOf(SchemaType.AVRO));
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(fromProperties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(toProperties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckSchemaTypeNullCompatibility() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
Map<String, String> fromProperties = new HashMap<>();
Map<String, String> toProperties = new HashMap<>();
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(fromProperties).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).props(toProperties).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.FULL));
}
@Test
public void testCheckSchemaTypeAlwaysCompatibility() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
StringSchema stringSchema = new StringSchema();
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.STRING)
.data(stringSchema.getSchemaInfo().getSchema()).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema).getSchemaInfo().getSchema()).build();
Assert.assertTrue(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.ALWAYS_COMPATIBLE));
}
@Test
public void testCheckSchemaTypeOtherCompatibility() {
AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
StringSchema stringSchema = new StringSchema();
SchemaData fromSchemaData = SchemaData.builder().type(SchemaType.STRING)
.data(stringSchema.getSchemaInfo().getSchema()).build();
SchemaData toSchemaData = SchemaData.builder().type(SchemaType.KEY_VALUE)
.data(KeyValueSchemaImpl.of(fooSchema, barSchema)
.getSchemaInfo().getSchema()).build();
Assert.assertFalse(checkers.get(SchemaType.KEY_VALUE)
.isCompatible(fromSchemaData, toSchemaData, SchemaCompatibilityStrategy.ALWAYS_INCOMPATIBLE));
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.h2;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.processors.cache.query.QueryCursorEx;
import org.apache.ignite.internal.processors.query.GridQueryFieldMetadata;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Assert;
import org.junit.Test;
/**
*
*/
public class GridSubqueryJoinOptimizerSelfTest extends GridCommonAbstractTest {
    /** Sorts result rows so unordered result sets can be compared deterministically. */
    private static final Comparator<List<?>> ROW_COMPARATOR = new RowComparator();

    /** Name of the cache all test SQL is executed through. */
    private static final String CACHE_NAME = "cache";

    /** First started node; initialized once in {@code beforeTestsStarted()}. */
    private static IgniteEx ignite;

    /** Cache handle used to run every query in this test class. */
    private static IgniteCache<Integer, Integer> cache;
    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        // Start a 2-node cluster shared by all tests in the class.
        ignite = (IgniteEx)startGridsMultiThreaded(2);

        cache = ignite.getOrCreateCache(new CacheConfiguration<>(CACHE_NAME));

        // Create and populate the test tables once up front.
        prepare();
    }
    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        super.afterTestsStopped();

        // Tear down the cluster started in beforeTestsStarted().
        stopAllGrids();
    }
/**
* Very simple case: all tables have an alias, all columns referred
* through tables' alias.
*/
@Test
public void testSelectExpression1() {
String outerSqlTemplate = "select e.name, (%s) from emp e order by 1";
String subSql = "select d.name from dep d where d.id = e.dep_id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case when tables has no aliases, but they are different.
*/
@Test
public void testSelectExpression2() {
String outerSqlTemplate = "select name, (%s) dname from emp e order by 1";
String subSql = "select name from dep where id = dep_id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case when tables are the same, inner table has an alias.
*/
@Test
public void testSelectExpression3() {
String outerSqlTemplate = "select name, (%s) ename from emp order by 1";
String subSql = "select name from emp e where e.id = emp.id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case when tables are the same, outer table has an alias.
*/
@Test
public void testSelectExpression4() {
String outerSqlTemplate = "select name, (%s) ename from emp e order by 1";
String subSql = "select name from emp where e.id = id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case when tables are the same, the outer table has an alias, the inner has an additional filter.
*/
@Test
public void testSelectExpression5() {
String outerSqlTemplate = "select name, (%s) ename from emp e order by 1";
String subSql = "select name from emp where e.id = id and name = 'emp1'";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case like {@link #testSelectExpression1()}, but PK of the inner table is compound.
*/
@Test
public void testSelectExpressionCompoundPk() {
String outerSqlTemplate = "select e.name, (%s) from emp e order by 1";
String subSql = "select d.name from dep2 d where d.id = e.dep_id and d.id2 = e.id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case where several subqueries used as several indeoendent columns.
*/
@Test
public void testSelectExpressionMultiple1() {
String outerSqlTemplate = "select (%s), (%s), (%s) from emp e1 order by 2, 3";
String subSql1 = "select 42";
String subSql2 = "select e2.name from emp e2 where e2.id = e1.id";
String subSql3 = "select d.name from dep d where d.id = dep_id";
String resSql = String.format(outerSqlTemplate, subSql1, subSql2, subSql3);
check(resSql, 2);
}
/**
* Case where several subqueries used within a function.
*/
@Test
public void testSelectExpressionMultiple2() {
String outerSqlTemplate = "select name, (%s) as lbl from emp e1 order by 1";
String subSql = "(select 'prefix ' as pref_val) " +
"|| (select name from dep2 dn where dn.id = dep_id and dn.id2 = dep_id)";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Simple case to ensure subqueries correctly pulled out from table list.
*/
@Test
public void testTableList1() {
String outerSqlTemplate = "select e.name, d.name from emp e, (%s) d where e.dep_id = d.id order by 1, 2";
String subSql = "select dd.name, dd.id from dep dd where dd.id < 100";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure subqueries correctly pulled out from table list, but tables the same.
*/
@Test
public void testTableList2() {
String outerSqlTemplate = "select d1.name, d2.name from dep d1, (%s) d2 where d1.id = d2.id order by 1, 2";
String subSql = "select d.name, d.id from dep d where d.id < 100";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* The same as {@link #testTableList2()}, but inner table has no alias.
*/
@Test
public void testTableList3() {
String outerSqlTemplate = "select d1.name, d2.name from dep d1, (%s) d2 where d1.id = d2.id order by 1, 2";
String subSql = "select name, id from dep where id < 100";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* The same as {@link #testTableList2()}, but both tables have no aliases.
*/
@Test
public void testTableList4() {
String outerSqlTemplate = "select name from dep, (%s) d where dep.id = d.id order by 1";
String subSql = "select id from dep where id < 100";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure subqueries correctly pulled out from table list, but inner table has no filters.
*/
@Test
public void testTableList5() {
String outerSqlTemplate = "select name from dep, (%s) d where dep.id = (MOD(d.id, 7) + 1) order by 1";
String subSql = "select id from dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure simple hierarchical subqueries properly pulled out from table list.
*/
@Test
public void testTableList6() {
String outerSqlTemplate = "select name from (%s) d where d.id < 100 order by 1";
String subSql = "select * from (select id, name from dep)";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure explicitly declared inner join with explicit join condition
* properly pulled out from table list.
*/
@Test
public void testTableList7() {
String outerSqlTemplate = "select e.name from emp e inner join (%s) d on e.id = d.id where d.id < 100 order by 1";
String subSql = "select id from dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure explicitly declared inner join with implicit join condition
* properly pulled out from table list.
*/
@Test
public void testTableList8() {
String outerSqlTemplate = "select e.name from emp e inner join (%s) d where e.id = d.id and d.id < 100 order by 1";
String subSql = "select id from dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure explicitly declared left join with explicit join condition
* properly pulled out from table list.
*/
@Test
public void testTableList9() {
String outerSqlTemplate = "select e.name from emp e left join (%s) d on e.id = d.id where d.id < 100 order by 1";
String subSql = "select id from dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure explicitly declared left join without join condition
* properly pulled out from table list.
*/
@Test
public void testTableList10() {
String outerSqlTemplate = "select e.name from emp e left join (%s) d where e.id = d.id and d.id < 100 order by 1";
String subSql = "select id from dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* The same as {@link #testTableList2()}, but both tables have no aliases.
*/
@Test
public void testTableListMultiple() {
String outerSqlTemplate = "select name from dep, (%s) d, (%s) e where dep.id = d.id order by 1";
String subSql1 = "select id from dep where id < 100";
String subSql2 = "select id from emp where id < 100";
String resSql = String.format(outerSqlTemplate, subSql1, subSql2);
check(resSql, 1);
}
/**
* Simple case where all tables have an alias, all columns referred
* through tables' alias.
*/
@Test
public void testExistsClause1() {
String outerSqlTemplate = "select e.id, e.name from emp e where exists (%s) order by 1";
String subSql = "select 1 from dep d where d.id = e.dep_id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but inner table has no alias.
*/
@Test
public void testExistsClause2() {
String outerSqlTemplate = "select e.id, e.name from emp e where exists (%s) order by 1";
String subSql = "select 1 from dep where id = e.dep_id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but outer table has no alias.
*/
@Test
public void testExistsClause3() {
String outerSqlTemplate = "select id, name from emp where exists (%s) order by 1";
String subSql = "select 1 from dep d where d.id = dep_id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but both tables have no aliases.
*/
@Test
public void testExistsClause4() {
String outerSqlTemplate = "select id, name from emp where exists (%s) order by 1";
String subSql = "select 1 from dep where id = dep_id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but tables are the same.
*/
@Test
public void testExistsClause5() {
String outerSqlTemplate = "select id, name from emp e where exists (%s) order by 1";
String subSql = "select 1 from emp where id = e.id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Select result of boolean operation with constant in subquery. For this case optimization is disabled.
*/
@Test
public void testExistsClause6() {
String outerSqlTemplate = "SELECT (EXISTS (%s) )";
String subSql1 = "SELECT 1 FROM emp WHERE id = 1";
String subSql2 = "SELECT 1";
check(String.format(outerSqlTemplate, subSql1), 2);
check(String.format(outerSqlTemplate, subSql2), 2);
}
/**
* This case is similar to EXISTS case. Optimization is disabled.
*/
@Test
public void testInCause() {
String outerSqlTemplate = "SELECT e.id, 1 IN (%s) from emp e";
String subSql = "SELECT 1 FROM emp WHERE id = 1";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* This case is similar to EXISTS case. Optimization is disabled.
*/
@Test
public void testNotInClause() {
String outerSqlTemplate = "SELECT 1 NOT IN (%s)";
String subSql = "SELECT 1 FROM emp WHERE id = 1";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Test boolean expression with AND operation. Optimization is disabled.
*/
@Test
public void testAndClause() {
String outerSqlTemplate = "SELECT TRUE AND (%s)";
String subSql = "SELECT TRUE FROM emp WHERE id = 1";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Test subquery with constant. Optimization is disabled.
*/
@Test
public void testConstantSubQuery() {
String outerSqlTemplate = "SELECT (%s)";
String subSql = "SELECT 1 FROM emp WHERE id = 1";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Test different operations with subquery. Optimization is disabled.
*/
@Test
public void testOperationWithSubQuery() {
String outerSqlTemplate1 = "SELECT 1 = (%s)";
String outerSqlTemplate2 = "SELECT 1 || (%s)";
String outerSqlTemplate3 = "SELECT 1 + (%s)";
String outerSqlTemplate4 = "SELECT (%s) IS NULL";
String subSql = "SELECT 1 FROM emp WHERE id = 1";
check(String.format(outerSqlTemplate1, subSql), 2);
check(String.format(outerSqlTemplate2, subSql), 2);
check(String.format(outerSqlTemplate3, subSql), 2);
check(String.format(outerSqlTemplate4, subSql), 2);
}
/**
* Case with IN condition.
*/
@Test
public void testWhereInClause() {
String outerSqlTemplate = "select id, name from emp e where 1 IN (%s) order by 1";
String subSql = "select 1 from emp where id = e.id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but inner table has coumpound PK.
*/
@Test
public void testExistsClauseCompoundPk() {
String outerSqlTemplate = "select id, name from emp e where exists (%s) order by 1";
String subSql = "select 1 from dep2 where id = e.id and id2 = 12";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with multiple EXISTS clauses.
*/
@Test
public void testExistsClauseMultiple() {
String outerSqlTemplate = "select id, name from dep2 d where exists (%s) and exists (%s) order by 1";
String subSql1 = "select 1 from emp where d.id = dep_id and id = 3";
String subSql2 = "select 1 from emp where d.id2 = dep_id and id = 3";
String resSql = String.format(outerSqlTemplate, subSql1, subSql2);
check(resSql, 1);
}
/**
* Simple case where all tables have an alias, all columns referred
* through tables' alias.
*/
@Test
public void testInClause1() {
String outerSqlTemplate = "select e.id, e.name from emp e where e.dep_id in (%s) order by 1";
String subSql = "select d.id from dep d";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but inner table has no alias.
*/
@Test
public void testInClause2() {
String outerSqlTemplate = "select e.id, e.name from emp e where e.dep_id in (%s) order by 1";
String subSql = "select id from dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but outer table has no alias.
*/
@Test
public void testInClause3() {
String outerSqlTemplate = "select id, name from emp where dep_id in (%s) order by 1";
String subSql = "select d.id from dep d";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but both tables have no aliases.
*/
@Test
public void testInClause4() {
String outerSqlTemplate = "select id, name from emp where dep_id in (%s) order by 1";
String subSql = "select id from dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but tables are the same.
*/
@Test
public void testInClause5() {
String outerSqlTemplate = "select id, name from emp e where id in (%s) order by 1";
String subSql = "select id from emp";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure that array on the left of the IN clause is handled properly.
*/
@Test
public void testInClause6() {
String outerSqlTemplate = "select id, name from emp e where (id, name) in (%s) order by 1";
String subSql = "select (id, name) from emp";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case to ensure that expression on the left of the IN clause is handled properly.
*/
@Test
public void testInClause7() {
String outerSqlTemplate = "select id, name from emp e where (2 + abs(id)) in (%s) order by 1";
String subSql = "select id from emp";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Simple case, but inner table has coumpound PK.
*/
@Test
public void testInClauseCompoundPk() {
String outerSqlTemplate = "select id, name from emp e where id in (%s) order by 1";
String subSql = "select id from dep2 where id2 = 12";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with multiple IN clauses.
*/
@Test
public void testInClauseMultiple() {
String outerSqlTemplate = "select id, name from dep2 d where id in (%s) and id2 in (%s) order by 1";
String subSql = "select id from emp";
String resSql = String.format(outerSqlTemplate, subSql, subSql);
check(resSql, 1);
}
/**
* Test should verify cases with UNION ALL.
*/
@Test
public void testOptimizationAppliedToUnion() {
String sql = "" +
"select emp.name, d.id from emp, (select * from dep) d " +
"union all " +
"select (select name from dep2 where id = 1 and id2 = 1) as dep_name, d.id " +
"from dep d where exists (select 1 from dep where id = d.id)";
check(sql, 1);
}
/**
* Case with alias in subqueries in union.
*/
@Test
public void testOptimizationAliasUnion() {
String outerSqlTemplate = "SELECT d FROM (%s) u union all SELECT d FROM (%s) z;";
String subSql = "SELECT id + id * id as d FROM dep";
String resSql = String.format(outerSqlTemplate, subSql, subSql);
check(resSql, 1);
}
/**
* Case with alias in subquery.
*/
@Test
public void testOptimizationAlias1() {
String outerSqlTemplate = "SELECT d FROM (%s) u;";
String subSql = "SELECT id + id * id as d FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with the same alias as column name in subquery.
*/
@Test
public void testOptimizationAlias2() {
String outerSqlTemplate = "SELECT id FROM (%s) u;";
String subSql = "SELECT id + id * id as id FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with a double aliases in the query and subquery.
*/
@Test
public void testOptimizationAlias3() {
String outerSqlTemplate = "SELECT d1, d1 as p1, d2 as p2, d3::VARCHAR as p3, d2::VARCHAR as p4 FROM (%s) u;";
String subSql = "SELECT id as d1, id + 1 as d2, 2 + 2 as d3 FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with a sum of a set of variables with aliases and different types (pure column, constant, sum).
*/
@Test
public void testOptimizationAlias4() {
String outerSqlTemplate = "SELECT (d1 + d2 + d3 + id) as p FROM (%s) u;";
String subSql = "SELECT id, id as d1, id + 1 as d2, 2 + 2 as d3 FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with constants in subquery without aliases.
*/
@Test
public void testOptimizationConstant1() {
String outerSqlTemplate = "SELECT * FROM (%s) u;";
String subSql = "SELECT 2 + 2, '1+1', 3.14::DECIMAL, extract(year from CURRENT_DATE()) FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with constant as column name.
*/
@Test
public void testOptimizationConstant2() {
String outerSqlTemplate = "SELECT \"42\" as p FROM (%s) u;";
String subSql = "SELECT 42 FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with WHEN-THEN-ELSE construction in subquery.
*/
@Test
public void testOptimizationCaseWhen() {
String outerSqlTemplate = "SELECT * FROM (%s) u;";
String subSql = "SELECT Case id When 1 Then 4 Else 3 End as A, Case id When 1 Then 3 Else 4 End as B FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with CAST function with alias in subquery.
*/
@Test
public void testOptimizationCastFunction() {
String outerSqlTemplate = "SELECT z FROM (%s) u;";
String subSql = "SELECT CAST(3.14 as DECIMAL) z FROM dep";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with jeft join with WHERE FALSE condition inside.
*/
@Test
public void testOptimizationLeftJoinWhereFalse() {
String outerSqlTemplate = "SELECT * FROM dep AS t1 LEFT JOIN (%s) AS t2 ON t1.id = t2.id;";
String subSql = "SELECT * FROM dep2 WHERE false";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with jeft join with false where condition inside.
*/
@Test
public void testOptimizationLeftJoinWhereFalse2() {
String outerSqlTemplate = "SELECT * FROM dep AS t1 LEFT JOIN (%s) AS t2 ON t1.id = t2.id;";
String subSql = "SELECT * FROM dep2 WHERE dep2.id IS NULL";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with jeft join with false where condition inside.
*/
@Test
public void testOptimizationLeftJoinWhereFalse3() {
String outerSqlTemplate = "SELECT * FROM dep AS t1 LEFT JOIN (%s) AS t2 ON t1.id = true;";
String subSql = "SELECT * FROM dep2 WHERE dep2.id IS NULL";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Case with left join where subquery is on the left shoulder.
*/
@Test
public void testOptimizationLeftJoinSubqueryOnLeft() {
String outerSqlTemplate = "SELECT * FROM (%s) AS t1 LEFT JOIN dep AS t2 ON t1.dep_id = t2.id;";
String subSql = "SELECT * FROM emp WHERE id % 2 = 1";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 1);
}
/**
* Test should verify all cases where subquery should not be rewrited.
*/
@Test
public void testOptimizationShouldNotBeApplied1() {
String sql = "" +
// follow should not be rewrited beacuse of aggregates
"select (select max(id) from emp) f1," +
" (select sum(id) from emp) f2," +
" (select distinct id from emp where id = 1) f3," +
" (select distinct(id) from emp where id = 1) f4," +
" (select id from emp where id = 1 group by id) f5," +
" (select id from emp where id = 1 group by 1) f6," +
" (select id from emp limit 1) f7," +
" (select id from emp where id = 2 offset 2) f8," +
// and this one because dep2 has compound pk (id, id2),
// so predicate over 'id' could not guarantee uniqueness
" (select id from dep2 where id = 2) f9" +
" from dep";
check(sql, 10);
}
/**
* Test should verify that optimization won't be applied when columns of the compound index
* connect by OR.
*/
@Test
public void testOptimizationShouldNotBeApplied2() {
String outerSqlTemplate = "select e.id, e.name from emp e where exists (%s) order by 1";
String subSql = "select 1 from dep2 d2 where d2.id = e.id or d2.id2 = e.id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Case to ensure subquery with join won't be pulled out from table list.
*/
@Test
public void testOptimizationShouldNotBeApplied3() {
String outerSqlTemplate = "select e.name from emp e, (%s) d order by 1 limit 10";
String subSql = "select d1.id, d2.name from dep d1, dep d2";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2 + 1/* explain reports one additional SELECT in comments */);
}
/**
* Case to ensure subquery with join won't be pulled out from EXISTS.
*/
@Test
public void testOptimizationShouldNotBeApplied4() {
String outerSqlTemplate = "select e.name from emp e where exists (%s) order by 1 limit 10";
String subSql = "select 1 from dep d1, dep d2 where d1.id = d2.id and d1.id = e.id";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Test should verify that optimization won't be applied when columns of the compound index
* connect by OR (ver 2).
*/
@Test
public void testOptimizationShouldNotBeApplied5() {
String outerSqlTemplate = "select e.id, e.name from emp e where exists (%s) order by 1";
String subSql = "select 1 from dep2 d2 where 1 = 1 and (d2.id = e.id or d2.id2 = e.id)";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Test should verify that optimization won't be applied when columns of the compound index
* connect by OR (ver 3).
*/
@Test
public void testOptimizationShouldNotBeApplied6() {
String outerSqlTemplate = "select e.id, e.name from emp e where exists (%s) order by 1";
String subSql = "select 1 from dep2 d2 where 1 = 1 and (d2.id = e.id and (d2.id2 = 1 or d2.id2 = 2))";
String resSql = String.format(outerSqlTemplate, subSql);
check(resSql, 2);
}
/**
* Case when select subquery is under EXISTS operator.
*/
@Test
public void testExistsOperatorWithSubqueryUnderSelect() {
String outerSqlTemplate = "SELECT (EXISTS (%s));";
String subSql1 = "SELECT id FROM dep WHERE id = 1";
String subSql2 = "SELECT 1 FROM dep WHERE id = 1";
check(String.format(outerSqlTemplate, subSql1), 2);
check(String.format(outerSqlTemplate, subSql2), 2);
}
/**
* Case when select subquery is under different possible operators.
*/
@Test
public void testDifferentOperatorsWithSubqueryUnderSelect() {
String outerSqlTemplate = "SELECT (%s);";
List<String> subSelects = new ArrayList<>();
subSelects.add("(SELECT %s FROM dep WHERE id = 1) IN (1,-1)");
subSelects.add("(SELECT %s FROM dep WHERE id = 1) IS NOT NULL");
subSelects.add("(SELECT %s FROM dep WHERE id = 1) IS 1");
subSelects.add("(SELECT %s FROM dep WHERE id = 1) < 0");
subSelects.add("-(SELECT %s FROM dep WHERE id = 1)");
subSelects.add("(SELECT %s FROM dep WHERE id = 1) = 2");
for (String param : Arrays.asList("1", "2")) {
for (String subSelect : subSelects) {
String formattedSubSelect = String.format(subSelect, param);
check(String.format(outerSqlTemplate, formattedSubSelect), 2);
}
}
for (String subSelect : subSelects) {
String formattedSubSelect = String.format(subSelect, "id");
check(String.format(outerSqlTemplate, formattedSubSelect), 1);
}
}
/**
* Case when select subquery from another subquery, which is select from another subquery.
*/
@Test
public void testSeveralNestedSubqueries() {
String innerSql = "SELECT id as id0, name as name0 FROM dep";
String outerSqlTemplate = "SELECT id%d as id%d, name%d as name%d FROM %s WHERE id%d > %d";
String curSql = innerSql;
for (int i = 0; i < 5; i++) {
curSql = String.format(outerSqlTemplate, i, i + 1, i, i + 1, '(' + curSql + ')', i, i);
check(curSql, 1);
}
}
/**
* @param sql Sql.
* @param expSelectClauses Expected select clauses.
*/
private void check(String sql, int expSelectClauses) {
optimizationEnabled(false);
FieldsQueryCursor<List<?>> qry = cache.query(new SqlFieldsQuery(sql));
List<GridQueryFieldMetadata> expMetaList = ((QueryCursorEx<List<?>>)qry).fieldsMeta();
List<List<?>> exp = qry.getAll();
exp.sort(ROW_COMPARATOR);
optimizationEnabled(true);
FieldsQueryCursor<List<?>> optQry = cache.query(new SqlFieldsQuery(sql).setEnforceJoinOrder(true));
List<GridQueryFieldMetadata> actMetaList = ((QueryCursorEx<List<?>>)optQry).fieldsMeta();
List<List<?>> act = optQry.getAll();
act.sort(ROW_COMPARATOR);
Assert.assertEquals("Result set mismatch", exp, act);
List<String> expFieldTypes = new ArrayList<>();
List<String> actualFieldTypes = new ArrayList<>();
for (int i = 0; i < expMetaList.size(); i++) {
GridQueryFieldMetadata expMeta = expMetaList.get(i);
GridQueryFieldMetadata actMeta = actMetaList.get(i);
expFieldTypes.add(expMeta.fieldName() + ":" + expMeta.fieldTypeName());
actualFieldTypes.add(actMeta.fieldName() + ":" + actMeta.fieldTypeName());
}
Assert.assertEquals("Result set field names or field types mismatch", expFieldTypes, actualFieldTypes);
String plan = cache.query(new SqlFieldsQuery("explain " + sql)).getAll().get(0).get(0).toString();
System.out.println(plan);
int actCnt = countEntries(plan, "SELECT");
Assert.assertEquals(String.format("SELECT-clause count mismatch: exp=%d, act=%d, plan=[%s]",
expSelectClauses, actCnt, plan), expSelectClauses, actCnt);
}
/**
* Creates all neccessary tables and inserts data.
*/
private void prepare() {
Random rnd = new Random();
cache.query(new SqlFieldsQuery("CREATE TABLE dep (id LONG PRIMARY KEY, name VARCHAR, dep_name VARCHAR)"));
cache.query(new SqlFieldsQuery("CREATE TABLE dep2 (id LONG, id2 LONG, name VARCHAR, PRIMARY KEY(id, id2))"));
cache.query(new SqlFieldsQuery("CREATE TABLE emp (id LONG PRIMARY KEY, name VARCHAR, dep_id LONG)"));
for (int i = 0; i < 20; i++) {
cache.query(new SqlFieldsQuery("insert into dep (id, name, dep_name) values(?, ?, ?)")
.setArgs(i, "dep" + i, "dep" + i));
cache.query(new SqlFieldsQuery("insert into dep2 (id, id2, name) values(?, ?, ?)")
.setArgs(i, i, "dep" + i));
cache.query(new SqlFieldsQuery("insert into emp (id, name, dep_id) values(?, ?, ?)")
.setArgs(i, "emp" + i, i < 10 ? rnd.nextInt(10) : null));
}
}
/**
* Count of entries of substring in string.
*
* @param where Where to search.
* @param what What to search.
* @return Count of entries or -1 if non is found.
*/
private int countEntries(String where, String what) {
return where.split(what).length - 1;
}
/** */
private void optimizationEnabled(boolean enabled) {
System.setProperty(IgniteSystemProperties.IGNITE_ENABLE_SUBQUERY_REWRITE_OPTIMIZATION, String.valueOf(enabled));
GridTestUtils.setFieldValue(GridSubqueryJoinOptimizer.class, "optimizationEnabled", null);
}
/** */
@SuppressWarnings("ComparatorNotSerializable")
private static class RowComparator implements Comparator<List<?>> {
/** {@inheritDoc} */
@Override public int compare(List<?> o1, List<?> o2) {
if (o1 == null && o2 == null)
return 0;
if (o1 == null)
return 1;
if (o2 == null)
return -1;
Iterator<?> i1 = o1.iterator(), i2 = o2.iterator();
while (i1.hasNext() && i2.hasNext()) {
Object e1 = i1.next(), e2 = i2.next();
if (e1 == null && e2 == null)
continue;
if (e1 == null)
return 1;
if (e2 == null)
return -1;
checkComparable(e1);
checkComparable(e2);
int res = ((Comparable<Object>)e1).compareTo(e2);
if (res != 0)
return res;
}
return Integer.signum(o1.size() - o2.size());
}
/** */
private void checkComparable(Object o) {
if (!(o instanceof Comparable))
throw new RuntimeException(o.getClass().getSimpleName() + " is not comparable");
}
}
}
|
googleapis/google-cloud-java | 36,091 | java-websecurityscanner/proto-google-cloud-websecurityscanner-v1/src/main/java/com/google/cloud/websecurityscanner/v1/UpdateScanConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/websecurityscanner/v1/web_security_scanner.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.websecurityscanner.v1;
/**
*
*
* <pre>
* Request for the `UpdateScanConfigRequest` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1.UpdateScanConfigRequest}
*/
public final class UpdateScanConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
UpdateScanConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateScanConfigRequest.newBuilder() to construct.
private UpdateScanConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateScanConfigRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateScanConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.class,
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.Builder.class);
}
private int bitField0_;
public static final int SCAN_CONFIG_FIELD_NUMBER = 2;
private com.google.cloud.websecurityscanner.v1.ScanConfig scanConfig_;
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return Whether the scanConfig field is set.
*/
@java.lang.Override
public boolean hasScanConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return The scanConfig.
*/
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.ScanConfig getScanConfig() {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()
: scanConfig_;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder getScanConfigOrBuilder() {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()
: scanConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 3;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getScanConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(3, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getScanConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest other =
(com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest) obj;
if (hasScanConfig() != other.hasScanConfig()) return false;
if (hasScanConfig()) {
if (!getScanConfig().equals(other.getScanConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasScanConfig()) {
hash = (37 * hash) + SCAN_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getScanConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the `UpdateScanConfigRequest` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1.UpdateScanConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.class,
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.Builder.class);
}
// Construct using com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getScanConfigFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
scanConfig_ = null;
if (scanConfigBuilder_ != null) {
scanConfigBuilder_.dispose();
scanConfigBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
getDefaultInstanceForType() {
return com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest build() {
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest buildPartial() {
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest result =
new com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.scanConfig_ = scanConfigBuilder_ == null ? scanConfig_ : scanConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest) {
return mergeFrom((com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest other) {
if (other
== com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.getDefaultInstance())
return this;
if (other.hasScanConfig()) {
mergeScanConfig(other.getScanConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18:
{
input.readMessage(getScanConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 18
case 26:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.websecurityscanner.v1.ScanConfig scanConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.websecurityscanner.v1.ScanConfig,
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder>
scanConfigBuilder_;
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return Whether the scanConfig field is set.
*/
public boolean hasScanConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return The scanConfig.
*/
public com.google.cloud.websecurityscanner.v1.ScanConfig getScanConfig() {
if (scanConfigBuilder_ == null) {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()
: scanConfig_;
} else {
return scanConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder setScanConfig(com.google.cloud.websecurityscanner.v1.ScanConfig value) {
if (scanConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scanConfig_ = value;
} else {
scanConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder setScanConfig(
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder builderForValue) {
if (scanConfigBuilder_ == null) {
scanConfig_ = builderForValue.build();
} else {
scanConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder mergeScanConfig(com.google.cloud.websecurityscanner.v1.ScanConfig value) {
if (scanConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& scanConfig_ != null
&& scanConfig_
!= com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()) {
getScanConfigBuilder().mergeFrom(value);
} else {
scanConfig_ = value;
}
} else {
scanConfigBuilder_.mergeFrom(value);
}
if (scanConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder clearScanConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
scanConfig_ = null;
if (scanConfigBuilder_ != null) {
scanConfigBuilder_.dispose();
scanConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public com.google.cloud.websecurityscanner.v1.ScanConfig.Builder getScanConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getScanConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder getScanConfigOrBuilder() {
if (scanConfigBuilder_ != null) {
return scanConfigBuilder_.getMessageOrBuilder();
} else {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()
: scanConfig_;
}
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.websecurityscanner.v1.ScanConfig,
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder>
getScanConfigFieldBuilder() {
if (scanConfigBuilder_ == null) {
scanConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.websecurityscanner.v1.ScanConfig,
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder>(
getScanConfig(), getParentForChildren(), isClean());
scanConfig_ = null;
}
return scanConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
private static final com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest();
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateScanConfigRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateScanConfigRequest>() {
@java.lang.Override
public UpdateScanConfigRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateScanConfigRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateScanConfigRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/geode | 36,094 | geode-core/src/main/java/org/apache/geode/cache/query/internal/ObjectIntHashMap.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.query.internal;
import java.io.IOException;
import java.io.Serializable;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.TreeMap;
/**
 * This class was derived from the <tt>HashMap</tt> implementation of the
* <tt>Map</tt> interface. This implementation provides all of the optional map operations, and
* supports ONLY int primitive type values, stored in primitive field of int type instead of
* promoting them to Integer and the <tt>null</tt> key. The default for a value is 0. (The
* <tt>ObjectIntHashMap</tt> class is roughly equivalent to <tt>HashMap</tt>, except that it is
* unsynchronized and permits nulls.) This class makes no guarantees as to the order of the map; in
* particular, it does not guarantee that the order will remain constant over time.
*
* <p>
* This implementation provides constant-time performance for the basic operations (<tt>get</tt> and
* <tt>put</tt>), assuming the hash function disperses the elements properly among the buckets.
* Iteration over collection views requires time proportional to the "capacity" of the
* <tt>HashMap</tt> instance (the number of buckets) plus its size (the number of key-value
* mappings). Thus, it's very important not to set the initial capacity too high (or the load factor
* too low) if iteration performance is important.
*
* <p>
* An instance of <tt>HashMap</tt> has two parameters that affect its performance: <i>initial
* capacity</i> and <i>load factor</i>. The <i>capacity</i> is the number of buckets in the hash
* table, and the initial capacity is simply the capacity at the time the hash table is created. The
* <i>load factor</i> is a measure of how full the hash table is allowed to get before its capacity
* is automatically increased. When the number of entries in the hash table exceeds the product of
* the load factor and the current capacity, the hash table is <i>rehashed</i> (that is, internal
* data structures are rebuilt) so that the hash table has approximately twice the number of
* buckets.
*
* <p>
* As a general rule, the default load factor (.75) offers a good tradeoff between time and space
* costs. Higher values decrease the space overhead but increase the lookup cost (reflected in most
* of the operations of the <tt>HashMap</tt> class, including <tt>get</tt> and <tt>put</tt>). The
* expected number of entries in the map and its load factor should be taken into account when
* setting its initial capacity, so as to minimize the number of rehash operations. If the initial
* capacity is greater than the maximum number of entries divided by the load factor, no rehash
* operations will ever occur.
*
* <p>
* If many mappings are to be stored in a <tt>HashMap</tt> instance, creating it with a sufficiently
* large capacity will allow the mappings to be stored more efficiently than letting it perform
* automatic rehashing as needed to grow the table.
*
* <p>
* <strong>Note that this implementation is not synchronized.</strong> If multiple threads access a
* hash map concurrently, and at least one of the threads modifies the map structurally, it
* <i>must</i> be synchronized externally. (A structural modification is any operation that adds or
* deletes one or more mappings; merely changing the value associated with a key that an instance
* already contains is not a structural modification.) This is typically accomplished by
* synchronizing on some object that naturally encapsulates the map.
*
* If no such object exists, the map should be "wrapped" using the
* {@link Collections#synchronizedMap Collections.synchronizedMap} method. This is best done at
* creation time, to prevent accidental unsynchronized access to the map:
*
* <pre>
* Map m = Collections.synchronizedMap(new IntHashMap(...));
* </pre>
*
* <p>
* The iterators returned by all of this class's "collection view methods" are <i>fail-fast</i>: if
* the map is structurally modified at any time after the iterator is created, in any way except
* through the iterator's own <tt>remove</tt> method, the iterator will throw a
* {@link ConcurrentModificationException}. Thus, in the face of concurrent modification, the
* iterator fails quickly and cleanly, rather than risking arbitrary, non-deterministic behavior at
* an undetermined time in the future.
*
* <p>
* Note that the fail-fast behavior of an iterator cannot be guaranteed as it is, generally
* speaking, impossible to make any hard guarantees in the presence of unsynchronized concurrent
* modification. Fail-fast iterators throw <tt>ConcurrentModificationException</tt> on a best-effort
* basis. Therefore, it would be wrong to write a program that depended on this exception for its
* correctness: <i>the fail-fast behavior of iterators should be used only to detect bugs.</i>
*
* @author Doug Lea
* @author Josh Bloch
* @author Arthur van Hoff
* @author Neal Gafter
*
* @version %I%, %G%
* @see Object#hashCode()
* @see Collection
* @see Map
* @see TreeMap
* @see Hashtable
* @since 1.2
* @since GemFire 7.1
*/
public class ObjectIntHashMap implements Cloneable, Serializable {
private static final long serialVersionUID = 7718697444988416372L;
/**
* The default initial capacity - MUST be a power of two.
*/
static final int DEFAULT_INITIAL_CAPACITY = 16;
/**
* The maximum capacity, used if a higher value is implicitly specified by either of the
* constructors with arguments. MUST be a power of two <= 1<<30.
*/
static final int MAXIMUM_CAPACITY = 1 << 30;
/**
* The load factor used when none specified in constructor.
*/
static final float DEFAULT_LOAD_FACTOR = 0.75f;
/**
* The table, resized as necessary. Length MUST Always be a power of two.
*/
transient Entry[] table;
/**
* The number of key-value mappings contained in this map.
*/
transient int size;
/**
* The next size value at which to resize (capacity * load factor).
*
* @serial
*/
int threshold;
/**
* The load factor for the hash table.
*
* @serial
*/
final float loadFactor;
/**
* The number of times this IntHashMap has been structurally modified Structural modifications are
* those that change the number of mappings in the IntHashMap or otherwise modify its internal
* structure (e.g., rehash). This field is used to make iterators on Collection-views of the
* IntHashMap fail-fast. (See ConcurrentModificationException).
*/
transient volatile int modCount;
/**
* Hashing strategy for key objects.
*
*/
final HashingStrategy hashingStrategy; // GemFire addition
/**
* Constructs an empty <tt>HashMap</tt> with the specified initial capacity and load factor.
*
* @param initialCapacity the initial capacity
* @param loadFactor the load factor
* @throws IllegalArgumentException if the initial capacity is negative or the load factor is
* nonpositive
*/
public ObjectIntHashMap(int initialCapacity, float loadFactor, HashingStrategy hs) {
if (initialCapacity < 0) {
throw new IllegalArgumentException("Illegal initial capacity: " + initialCapacity);
}
if (initialCapacity > MAXIMUM_CAPACITY) {
initialCapacity = MAXIMUM_CAPACITY;
}
if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
throw new IllegalArgumentException("Illegal load factor: " + loadFactor);
}
// Find a power of 2 >= initialCapacity
int capacity = 1;
while (capacity < initialCapacity) {
capacity <<= 1;
}
this.loadFactor = loadFactor;
threshold = (int) (capacity * loadFactor);
table = new Entry[capacity];
hashingStrategy = (hs == null) ? new IntHashMapStrategy() : hs;
init();
}
public ObjectIntHashMap(int initialCapacity, float loadFactor) {
this(initialCapacity, loadFactor, null);
}
/**
* Constructs an empty <tt>HashMap</tt> with the specified initial capacity and the default load
* factor (0.75).
*
* @param initialCapacity the initial capacity.
* @throws IllegalArgumentException if the initial capacity is negative.
*/
public ObjectIntHashMap(int initialCapacity) {
this(initialCapacity, DEFAULT_LOAD_FACTOR, null);
}
public ObjectIntHashMap(int initialCapacity, HashingStrategy hs) {
this(initialCapacity, DEFAULT_LOAD_FACTOR, hs);
}
public ObjectIntHashMap() {
this(null);
}
/**
* Constructs an empty <tt>HashMap</tt> with the default initial capacity (16) and the default
* load factor (0.75).
*/
public ObjectIntHashMap(HashingStrategy hs) {
loadFactor = DEFAULT_LOAD_FACTOR;
threshold = (int) (DEFAULT_INITIAL_CAPACITY * DEFAULT_LOAD_FACTOR);
table = new Entry[DEFAULT_INITIAL_CAPACITY];
hashingStrategy = (hs == null) ? new IntHashMapStrategy() : hs;
init();
}
// internal utilities
/**
* Initialization hook for subclasses. This method is called in all constructors and
* pseudo-constructors (clone, readObject) after IntHashMap has been initialized but before any
* entries have been inserted. (In the absence of this method, readObject would require explicit
* knowledge of subclasses.)
*/
void init() {}
/**
* Applies a supplemental hash function to a given hashCode, which defends against poor quality
* hash functions. This is critical because IntHashMap uses power-of-two length hash tables, that
* otherwise encounter collisions for hashCodes that do not differ in lower bits. Note: Null keys
* always map to hash 0, thus index 0.
*/
static int hash(int h) {
// This function ensures that hashCodes that differ only by
// constant multiples at each bit position have a bounded
// number of collisions (approximately 8 at default load factor).
h ^= (h >>> 20) ^ (h >>> 12);
return h ^ (h >>> 7) ^ (h >>> 4);
}
/**
* Returns index for hash code h.
*/
static int indexFor(int h, int length) {
return h & (length - 1);
}
/**
* Returns the number of key-value mappings in this map.
*
* @return the number of key-value mappings in this map
*/
public int size() {
return size;
}
/**
* Returns <tt>true</tt> if this map contains no key-value mappings.
*
* @return <tt>true</tt> if this map contains no key-value mappings
*/
public boolean isEmpty() {
return size == 0;
}
/**
* Returns the value to which the specified key is mapped, or {@code null} if this map contains no
* mapping for the key.
*
* <p>
* More formally, if this map contains a mapping from a key {@code k} to a value {@code v} such
* that {@code (key==null ? k==null :
* key.equals(k))}, then this method returns {@code v}; otherwise it returns {@code null}. (There
* can be at most one such mapping.)
*
* <p>
* A return value of {@code null} does not <i>necessarily</i> indicate that the map contains no
* mapping for the key; it's also possible that the map explicitly maps the key to {@code null}.
* The {@link #containsKey containsKey} operation may be used to distinguish these two cases.
*
* @see #put(Object, int)
*/
public int get(Object key) {
if (key == null) {
return getForNullKey();
}
int hash = hash(hashingStrategy.hashCode(key));
for (Entry e = table[indexFor(hash, table.length)]; e != null; e = e.next) {
Object k;
if (e.hash == hash && ((k = e.key) == key || hashingStrategy.equals(k, key))) {
return e.value;
}
}
return 0;
}
/**
* Offloaded version of get() to look up null keys. Null keys map to index 0. This null case is
* split out into separate methods for the sake of performance in the two most commonly used
* operations (get and put), but incorporated with conditionals in others.
*/
private int getForNullKey() {
for (Entry e = table[0]; e != null; e = e.next) {
if (e.key == null) {
return e.value;
}
}
return 0;
}
/**
* Returns <tt>true</tt> if this map contains a mapping for the specified key.
*
* @param key The key whose presence in this map is to be tested
* @return <tt>true</tt> if this map contains a mapping for the specified key.
*/
public boolean containsKey(Object key) {
return getEntry(key) != null;
}
/**
* Returns the entry associated with the specified key in the IntHashMap. Returns null if the
* IntHashMap contains no mapping for the key.
*/
Entry getEntry(Object key) {
int hash = (key == null) ? 0 : hash(hashingStrategy.hashCode(key));
for (Entry e = table[indexFor(hash, table.length)]; e != null; e = e.next) {
Object k;
if (e.hash == hash
&& ((k = e.key) == key || (key != null && hashingStrategy.equals(k, key)))) {
return e;
}
}
return null;
}
/**
* Associates the specified value with the specified key in this map. If the map previously
* contained a mapping for the key, the old value is replaced.
*
* @param key key with which the specified value is to be associated
* @param value value to be associated with the specified key
* @return the previous value associated with <tt>key</tt>, or <tt>null</tt> if there was no
* mapping for <tt>key</tt>. (A <tt>null</tt> return can also indicate that the map
* previously associated <tt>null</tt> with <tt>key</tt>.)
*/
public int put(Object key, int value) {
if (key == null) {
return putForNullKey(value);
}
int hash = hash(hashingStrategy.hashCode(key));
int i = indexFor(hash, table.length);
for (Entry e = table[i]; e != null; e = e.next) {
Object k;
if (e.hash == hash && ((k = e.key) == key || hashingStrategy.equals(k, key))) {
int oldValue = e.value;
e.value = value;
e.recordAccess(this);
return oldValue;
}
}
modCount++;
addEntry(hash, key, value, i);
return 0;
}
/**
* Offloaded version of put for null keys
*/
private int putForNullKey(int value) {
for (Entry e = table[0]; e != null; e = e.next) {
if (e.key == null) {
int oldValue = e.value;
e.value = value;
e.recordAccess(this);
return oldValue;
}
}
modCount++;
addEntry(0, null, value, 0);
return 0;
}
/**
* This method is used instead of put by constructors and pseudoconstructors (clone, readObject).
* It does not resize the table, check for comodification, etc. It calls createEntry rather than
* addEntry.
*/
private void putForCreate(Object key, int value) {
int hash = (key == null) ? 0 : hash(hashingStrategy.hashCode(key));
int i = indexFor(hash, table.length);
/*
* Look for preexisting entry for key. This will never happen for clone or deserialize. It will
* only happen for construction if the input Map is a sorted map whose ordering is inconsistent
* w/ equals.
*/
for (Entry e = table[i]; e != null; e = e.next) {
Object k;
if (e.hash == hash
&& ((k = e.key) == key || (key != null && hashingStrategy.equals(k, key)))) {
e.value = value;
return;
}
}
createEntry(hash, key, value, i);
}
private void putAllForCreate(ObjectIntHashMap m) {
for (Entry e : m.entrySet()) {
putForCreate(e.getKey(), e.getValue());
}
}
/**
* Rehashes the contents of this map into a new array with a larger capacity. This method is
* called automatically when the number of keys in this map reaches its threshold.
*
* If current capacity is MAXIMUM_CAPACITY, this method does not resize the map, but sets
* threshold to Integer.MAX_VALUE. This has the effect of preventing future calls.
*
* @param newCapacity the new capacity, MUST be a power of two; must be greater than current
* capacity unless current capacity is MAXIMUM_CAPACITY (in which case value is
* irrelevant).
*/
void resize(int newCapacity) {
Entry[] oldTable = table;
int oldCapacity = oldTable.length;
if (oldCapacity == MAXIMUM_CAPACITY) {
threshold = Integer.MAX_VALUE;
return;
}
Entry[] newTable = new Entry[newCapacity];
transfer(newTable);
table = newTable;
threshold = (int) (newCapacity * loadFactor);
}
/**
* Transfers all entries from current table to newTable.
*/
void transfer(Entry[] newTable) {
Entry[] src = table;
int newCapacity = newTable.length;
for (int j = 0; j < src.length; j++) {
Entry e = src[j];
if (e != null) {
src[j] = null;
do {
Entry next = e.next;
int i = indexFor(e.hash, newCapacity);
e.next = newTable[i];
newTable[i] = e;
e = next;
} while (e != null);
}
}
}
/**
* Copies all of the mappings from the specified map to this map. These mappings will replace any
* mappings that this map had for any of the keys currently in the specified map.
*
* @param m mappings to be stored in this map
* @throws NullPointerException if the specified map is null
*/
public void putAll(ObjectIntHashMap m) {
int numKeysToBeAdded = m.size();
if (numKeysToBeAdded == 0) {
return;
}
/*
* Expand the map if the map if the number of mappings to be added is greater than or equal to
* threshold. This is conservative; the obvious condition is (m.size() + size) >= threshold, but
* this condition could result in a map with twice the appropriate capacity, if the keys to be
* added overlap with the keys already in this map. By using the conservative calculation, we
* subject ourself to at most one extra resize.
*/
if (numKeysToBeAdded > threshold) {
int targetCapacity = (int) (numKeysToBeAdded / loadFactor + 1);
if (targetCapacity > MAXIMUM_CAPACITY) {
targetCapacity = MAXIMUM_CAPACITY;
}
int newCapacity = table.length;
while (newCapacity < targetCapacity) {
newCapacity <<= 1;
}
if (newCapacity > table.length) {
resize(newCapacity);
}
}
for (Entry e : m.entrySet()) {
put(e.getKey(), e.getValue());
}
}
/**
* Removes the mapping for the specified key from this map if present.
*
* @param key key whose mapping is to be removed from the map
* @return the previous value associated with <tt>key</tt>, or <tt>null</tt> if there was no
* mapping for <tt>key</tt>. (A <tt>null</tt> return can also indicate that the map
* previously associated <tt>null</tt> with <tt>key</tt>.)
*/
public int remove(Object key) {
Entry e = removeEntryForKey(key);
return (e == null ? 0 : e.value);
}
/**
* Removes and returns the entry associated with the specified key in the IntHashMap. Returns null
* if the IntHashMap contains no mapping for this key.
*/
Entry removeEntryForKey(Object key) {
int hash = (key == null) ? 0 : hash(hashingStrategy.hashCode(key));
int i = indexFor(hash, table.length);
Entry prev = table[i];
Entry e = prev;
while (e != null) {
Entry next = e.next;
Object k;
if (e.hash == hash
&& ((k = e.key) == key || (key != null && hashingStrategy.equals(k, key)))) {
modCount++;
size--;
if (prev == e) {
table[i] = next;
} else {
prev.next = next;
}
e.recordRemoval(this);
return e;
}
prev = e;
e = next;
}
return e;
}
/**
* Special version of remove for EntrySet.
*/
Entry removeMapping(Object o) {
if (!(o instanceof Entry)) {
return null;
}
Entry entry = (Entry) o;
Object key = entry.getKey();
int hash = (key == null) ? 0 : hash(hashingStrategy.hashCode(key));
int i = indexFor(hash, table.length);
Entry prev = table[i];
Entry e = prev;
while (e != null) {
Entry next = e.next;
if (e.hash == hash && e.equals(entry)) {
modCount++;
size--;
if (prev == e) {
table[i] = next;
} else {
prev.next = next;
}
e.recordRemoval(this);
return e;
}
prev = e;
e = next;
}
return e;
}
/**
* Removes all of the mappings from this map. The map will be empty after this call returns.
*/
public void clear() {
modCount++;
Entry[] tab = table;
for (int i = 0; i < tab.length; i++) {
tab[i] = null;
}
size = 0;
}
/**
* Returns <tt>true</tt> if this map maps one or more keys to the specified value.
*
* @param value value whose presence in this map is to be tested
* @return <tt>true</tt> if this map maps one or more keys to the specified value
*/
public boolean containsValue(int value) {
Entry[] tab = table;
for (final Entry entry : tab) {
for (Entry e = entry; e != null; e = e.next) {
if (value == e.value) {
return true;
}
}
}
return false;
}
/**
* Returns a shallow copy of this <tt>HashMap</tt> instance: the keys and values themselves are
* not cloned.
*
* @return a shallow copy of this map
*/
@Override
public Object clone() {
ObjectIntHashMap result = null;
try {
result = (ObjectIntHashMap) super.clone();
} catch (CloneNotSupportedException e) {
// assert false;
}
result.table = new Entry[table.length];
result.entrySet = null;
result.modCount = 0;
result.size = 0;
result.init();
result.putAllForCreate(this);
return result;
}
// Comparison and hashing.
/**
* Compares the specified object with this map for equality. Returns <tt>true</tt> if the given
* object is also a map and the two maps represent the same mappings. More formally, two maps
* <tt>m1</tt> and <tt>m2</tt> represent the same mappings if
* <tt>m1.entrySet().equals(m2.entrySet())</tt>. This ensures that the <tt>equals</tt> method
* works properly across different implementations of the <tt>Map</tt> interface.
*
* <p>
* This implementation first checks if the specified object is this map; if so it returns
* <tt>true</tt>. Then, it checks if the specified object is a map whose size is identical to the
* size of this map; if not, it returns <tt>false</tt>. If so, it iterates over this map's
* <tt>entrySet</tt> collection, and checks that the specified map contains each mapping that this
* map contains. If the specified map fails to contain such a mapping, <tt>false</tt> is returned.
* If the iteration completes, <tt>true</tt> is returned.
*
* @param o object to be compared for equality with this map
* @return <tt>true</tt> if the specified object is equal to this map
*/
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (!(o instanceof ObjectIntHashMap)) {
return false;
}
ObjectIntHashMap m = (ObjectIntHashMap) o;
if (m.size() != size()) {
return false;
}
try {
for (final Entry e : entrySet()) {
Object key = e.getKey();
int value = e.getValue();
if (!(m.containsKey(key))) {
return false;
} else if (!(value == m.get(key))) {
return false;
}
}
} catch (ClassCastException unused) {
return false;
} catch (NullPointerException unused) {
return false;
}
return true;
}
/**
* Returns the hash code value for this map. The hash code of a map is defined to be the sum of
* the hash codes of each entry in the map's <tt>entrySet()</tt> view. This ensures that
* <tt>m1.equals(m2)</tt> implies that <tt>m1.hashCode()==m2.hashCode()</tt> for any two maps
* <tt>m1</tt> and <tt>m2</tt>, as required by the general contract of {@link Object#hashCode}.
*
* <p>
* This implementation iterates over <tt>entrySet()</tt>, calling
* {@link java.util.Map.Entry#hashCode() hashCode()} on each element (entry) in the set, and
* adding up the results.
*
* @return the hash code value for this map
* @see java.util.Map.Entry#hashCode()
* @see Object#equals(Object)
* @see Set#equals(Object)
*/
public int hashCode() {
int h = 0;
for (final Entry entry : entrySet()) {
h += entry.hashCode();
}
return h;
}
/**
* Returns a string representation of this map. The string representation consists of a list of
* key-value mappings in the order returned by the map's <tt>entrySet</tt> view's iterator,
* enclosed in braces (<tt>"{}"</tt>). Adjacent mappings are separated by the characters
* <tt>", "</tt> (comma and space). Each key-value mapping is rendered as the key followed by an
* equals sign (<tt>"="</tt>) followed by the associated value. Keys and values are converted to
* strings as by {@link String#valueOf(Object)}.
*
* @return a string representation of this map
*/
public String toString() {
Iterator<Entry> i = entrySet().iterator();
if (!i.hasNext()) {
return "{}";
}
StringBuilder sb = new StringBuilder();
sb.append('{');
for (;;) {
Entry e = i.next();
Object key = e.getKey();
int value = e.getValue();
sb.append(key == this ? "(this Map)" : key);
sb.append('=');
sb.append(value);
if (!i.hasNext()) {
return sb.append('}').toString();
}
sb.append(", ");
}
}
class Entry {
final Object key;
int value; // GemFire Addition.
Entry next;
final int hash;
/**
* Creates new entry.
*/
Entry(int h, Object k, int v, Entry n) {
value = v;
next = n;
key = k;
hash = h;
}
public Object getKey() {
return key;
}
public int getValue() {
return value;
}
public int setValue(int newValue) {
int oldValue = value;
value = newValue;
return oldValue;
}
public boolean equals(Object o) {
if (!(o instanceof Entry)) {
return false;
}
Entry e = (Entry) o;
Object k1 = getKey();
Object k2 = e.getKey();
if (k1 == k2 || (k1 != null && hashingStrategy.equals(k1, k2))) {
int v1 = getValue();
int v2 = e.getValue();
return v1 == v2;
}
return false;
}
public int hashCode() {
return hash ^ value;
}
public String toString() {
return getKey() + "=" + getValue();
}
/**
* This method is invoked whenever the value in an entry is overwritten by an invocation of
* put(k,v) for a key k that's already in the IntHashMap.
*/
void recordAccess(ObjectIntHashMap m) {}
/**
* This method is invoked whenever the entry is removed from the table.
*/
void recordRemoval(ObjectIntHashMap m) {}
}
/**
* Adds a new entry with the specified key, value and hash code to the specified bucket. It is the
* responsibility of this method to resize the table if appropriate.
*
* Subclass overrides this to alter the behavior of put method.
*/
void addEntry(int hash, Object key, int value, int bucketIndex) {
Entry e = table[bucketIndex];
table[bucketIndex] = new Entry(hash, key, value, e);
if (size++ >= threshold) {
resize(2 * table.length);
}
}
/**
* Like addEntry except that this version is used when creating entries as part of Map
* construction or "pseudo-construction" (cloning, deserialization). This version needn't worry
* about resizing the table.
*
* Subclass overrides this to alter the behavior of IntHashMap(Map), clone, and readObject.
*/
void createEntry(int hash, Object key, int value, int bucketIndex) {
Entry e = table[bucketIndex];
table[bucketIndex] = new Entry(hash, key, value, e);
size++;
}
private abstract class HashIterator<E> implements Iterator<E> {
Entry next; // next entry to return
int expectedModCount; // For fast-fail
int index; // current slot
Entry current; // current entry
HashIterator() {
expectedModCount = modCount;
if (size > 0) { // advance to first entry
Entry[] t = table;
while (index < t.length && (next = t[index++]) == null) {
}
}
}
@Override
public boolean hasNext() {
return next != null;
}
Entry nextEntry() {
if (modCount != expectedModCount) {
throw new ConcurrentModificationException();
}
Entry e = next;
if (e == null) {
throw new NoSuchElementException();
}
if ((next = e.next) == null) {
Entry[] t = table;
while (index < t.length && (next = t[index++]) == null) {
}
}
current = e;
return e;
}
@Override
public void remove() {
if (current == null) {
throw new IllegalStateException();
}
if (modCount != expectedModCount) {
throw new ConcurrentModificationException();
}
Object k = current.key;
current = null;
removeEntryForKey(k);
expectedModCount = modCount;
}
}
private class KeyIterator extends HashIterator<Object> {
@Override
public Object next() {
return nextEntry().getKey();
}
}
private class EntryIterator extends HashIterator<Entry> {
@Override
public Entry next() {
return nextEntry();
}
}
// Subclass overrides these to alter behavior of views' iterator() method
Iterator<Object> newKeyIterator() {
return new KeyIterator();
}
Iterator<Entry> newEntryIterator() {
return new EntryIterator();
}
// Views
private transient Set<Entry> entrySet = null;
private transient Set<Object> keySet = null;
/**
* Returns a {@link Set} view of the keys contained in this map. The set is backed by the map, so
* changes to the map are reflected in the set, and vice-versa. If the map is modified while an
* iteration over the set is in progress (except through the iterator's own <tt>remove</tt>
* operation), the results of the iteration are undefined. The set supports element removal, which
* removes the corresponding mapping from the map, via the <tt>Iterator.remove</tt>,
* <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations. It
* does not support the <tt>add</tt> or <tt>addAll</tt> operations.
*/
public Set<Object> keySet() {
Set<Object> ks = keySet;
return (ks != null ? ks : (keySet = new KeySet()));
}
private class KeySet extends AbstractSet<Object> {
@Override
public Iterator<Object> iterator() {
return newKeyIterator();
}
@Override
public int size() {
return size;
}
@Override
public boolean contains(Object o) {
return containsKey(o);
}
@Override
public boolean remove(Object o) {
return removeEntryForKey(o) != null;
}
@Override
public void clear() {
ObjectIntHashMap.this.clear();
}
}
/**
* Returns a {@link Set} view of the mappings contained in this map. The set is backed by the map,
* so changes to the map are reflected in the set, and vice-versa. If the map is modified while an
* iteration over the set is in progress (except through the iterator's own <tt>remove</tt>
* operation, or through the <tt>setValue</tt> operation on a map entry returned by the iterator)
* the results of the iteration are undefined. The set supports element removal, which removes the
* corresponding mapping from the map, via the <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
* <tt>removeAll</tt>, <tt>retainAll</tt> and <tt>clear</tt> operations. It does not support the
* <tt>add</tt> or <tt>addAll</tt> operations.
*
* @return a set view of the mappings contained in this map
*/
public Set<Entry> entrySet() {
return entrySet0();
}
private Set<Entry> entrySet0() {
Set<Entry> es = entrySet;
return es != null ? es : (entrySet = new EntrySet());
}
private class EntrySet extends AbstractSet<Entry> {
@Override
public Iterator<Entry> iterator() {
return newEntryIterator();
}
@Override
public boolean contains(Object o) {
if (!(o instanceof Entry)) {
return false;
}
Entry e = (Entry) o;
Entry candidate = getEntry(e.getKey());
return candidate != null && candidate.equals(e);
}
@Override
public boolean remove(Object o) {
return removeMapping(o) != null;
}
@Override
public int size() {
return size;
}
@Override
public void clear() {
ObjectIntHashMap.this.clear();
}
}
/**
 * Save the state of the <tt>HashMap</tt> instance to a stream (i.e., serialize it).
 *
 * @serialData The <i>capacity</i> of the IntHashMap (the length of the bucket array) is emitted
 *     (int), followed by the <i>size</i> (an int, the number of key-value mappings),
 *     followed by the key (Object) and value (int) for each key-value mapping. The
 *     key-value mappings are emitted in no particular order.
 */
private void writeObject(java.io.ObjectOutputStream s) throws IOException {
// Grab the iterator before defaultWriteObject so a concurrent structural
// change is detected by the iterator, not silently serialized half-way.
Iterator<Entry> i = (size > 0) ? entrySet0().iterator() : null;
// Write out the threshold, loadfactor, and any hidden stuff
s.defaultWriteObject();
// Write out number of buckets
s.writeInt(table.length);
// Write out size (number of Mappings)
s.writeInt(size);
// Write out keys and values (alternating)
if (i != null) {
while (i.hasNext()) {
Entry e = i.next();
s.writeObject(e.getKey());
s.writeInt(e.getValue());
}
}
}
/**
 * Reconstitute the <tt>HashMap</tt> instance from a stream (i.e., deserialize it).
 *
 * <p>The stream layout is the one produced by {@code writeObject}: bucket
 * count (int), mapping count (int), then alternating key (Object) / value
 * (int) pairs. Counts read from the (potentially untrusted) stream are
 * validated before any allocation.
 *
 * @param s the stream to read from
 * @throws IOException if an I/O error occurs or the stream is corrupted
 * @throws ClassNotFoundException if the class of a serialized key cannot be found
 */
private void readObject(java.io.ObjectInputStream s) throws IOException, ClassNotFoundException {
    // Read in the threshold, loadfactor, and any hidden stuff
    s.defaultReadObject();
    // Read in number of buckets; reject corrupt data before allocating.
    int numBuckets = s.readInt();
    if (numBuckets < 0) {
        throw new java.io.StreamCorruptedException("Illegal bucket count: " + numBuckets);
    }
    table = new Entry[numBuckets];
    init(); // Give subclass a chance to do its thing.
    // Read in the number of mappings. Named mappingCount (not size) to avoid
    // shadowing the enclosing class's size field, which putForCreate maintains.
    int mappingCount = s.readInt();
    if (mappingCount < 0) {
        throw new java.io.StreamCorruptedException("Illegal mapping count: " + mappingCount);
    }
    // Read the keys and values, and put the mappings in the IntHashMap
    for (int i = 0; i < mappingCount; i++) {
        Object key = s.readObject();
        int value = s.readInt();
        putForCreate(key, value);
    }
}
// These methods are used when serializing HashSets
// Current number of hash buckets (length of the internal table array).
int capacity() {
return table.length;
}
// Load factor governing when the table is resized.
float loadFactor() {
return loadFactor;
}
/**
 * Default {@link HashingStrategy}: delegates to the objects' own
 * {@code hashCode()} / {@code equals()}, with two {@code null}s considered equal.
 */
private class IntHashMapStrategy implements HashingStrategy {
    @Override
    public int hashCode(Object o) {
        return o.hashCode();
    }

    @Override
    public boolean equals(Object o1, Object o2) {
        if (o1 == null) {
            // Two nulls are equal; null never equals a non-null object.
            return o2 == null;
        }
        if (o2 == null) {
            return false;
        }
        return o1.equals(o2);
    }
}
}
|
googleapis/google-cloud-java | 36,007 | java-datalabeling/proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/ListExamplesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/data_labeling_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datalabeling.v1beta1;
/**
*
*
* <pre>
 * Results of listing Examples in an annotated dataset.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListExamplesResponse}
*/
public final class ListExamplesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.ListExamplesResponse)
ListExamplesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListExamplesResponse.newBuilder() to construct.
private ListExamplesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListExamplesResponse() {
examples_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListExamplesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse.class,
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse.Builder.class);
}
public static final int EXAMPLES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datalabeling.v1beta1.Example> examples_;
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datalabeling.v1beta1.Example> getExamplesList() {
return examples_;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datalabeling.v1beta1.ExampleOrBuilder>
getExamplesOrBuilderList() {
return examples_;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
@java.lang.Override
public int getExamplesCount() {
return examples_.size();
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.Example getExamples(int index) {
return examples_.get(index);
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ExampleOrBuilder getExamplesOrBuilder(int index) {
return examples_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token to retrieve next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
// The field holds either a String or a ByteString; a ByteString is decoded
// as UTF-8 on first access and the String is cached back into the field.
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < examples_.size(); i++) {
output.writeMessage(1, examples_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < examples_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, examples_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
// Non-ListExamplesResponse messages are compared by the superclass
// (reference equality for messages), keeping the equals contract symmetric.
if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.ListExamplesResponse)) {
return super.equals(obj);
}
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse other =
(com.google.cloud.datalabeling.v1beta1.ListExamplesResponse) obj;
// Field-by-field comparison: examples list, page token, then unknown fields.
if (!getExamplesList().equals(other.getExamplesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// 0 is the "not yet computed" sentinel; a hash that legitimately computes
// to 0 is simply recomputed on every call (benign, message is immutable).
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Repeated field only contributes when non-empty, so empty and
// default-instance messages hash identically.
if (getExamplesCount() > 0) {
hash = (37 * hash) + EXAMPLES_FIELD_NUMBER;
hash = (53 * hash) + getExamplesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
 * Results of listing Examples in an annotated dataset.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListExamplesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.ListExamplesResponse)
com.google.cloud.datalabeling.v1beta1.ListExamplesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse.class,
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse.Builder.class);
}
// Construct using com.google.cloud.datalabeling.v1beta1.ListExamplesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (examplesBuilder_ == null) {
examples_ = java.util.Collections.emptyList();
} else {
examples_ = null;
examplesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListExamplesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesResponse getDefaultInstanceForType() {
return com.google.cloud.datalabeling.v1beta1.ListExamplesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesResponse build() {
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesResponse buildPartial() {
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse result =
new com.google.cloud.datalabeling.v1beta1.ListExamplesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.datalabeling.v1beta1.ListExamplesResponse result) {
if (examplesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
examples_ = java.util.Collections.unmodifiableList(examples_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.examples_ = examples_;
} else {
result.examples_ = examplesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.datalabeling.v1beta1.ListExamplesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datalabeling.v1beta1.ListExamplesResponse) {
return mergeFrom((com.google.cloud.datalabeling.v1beta1.ListExamplesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datalabeling.v1beta1.ListExamplesResponse other) {
if (other == com.google.cloud.datalabeling.v1beta1.ListExamplesResponse.getDefaultInstance())
return this;
if (examplesBuilder_ == null) {
if (!other.examples_.isEmpty()) {
if (examples_.isEmpty()) {
examples_ = other.examples_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureExamplesIsMutable();
examples_.addAll(other.examples_);
}
onChanged();
}
} else {
if (!other.examples_.isEmpty()) {
if (examplesBuilder_.isEmpty()) {
examplesBuilder_.dispose();
examplesBuilder_ = null;
examples_ = other.examples_;
bitField0_ = (bitField0_ & ~0x00000001);
examplesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getExamplesFieldBuilder()
: null;
} else {
examplesBuilder_.addAllMessages(other.examples_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.datalabeling.v1beta1.Example m =
input.readMessage(
com.google.cloud.datalabeling.v1beta1.Example.parser(), extensionRegistry);
if (examplesBuilder_ == null) {
ensureExamplesIsMutable();
examples_.add(m);
} else {
examplesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.datalabeling.v1beta1.Example> examples_ =
java.util.Collections.emptyList();
private void ensureExamplesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
examples_ =
new java.util.ArrayList<com.google.cloud.datalabeling.v1beta1.Example>(examples_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.Example,
com.google.cloud.datalabeling.v1beta1.Example.Builder,
com.google.cloud.datalabeling.v1beta1.ExampleOrBuilder>
examplesBuilder_;
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public java.util.List<com.google.cloud.datalabeling.v1beta1.Example> getExamplesList() {
if (examplesBuilder_ == null) {
return java.util.Collections.unmodifiableList(examples_);
} else {
return examplesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public int getExamplesCount() {
if (examplesBuilder_ == null) {
return examples_.size();
} else {
return examplesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Example getExamples(int index) {
if (examplesBuilder_ == null) {
return examples_.get(index);
} else {
return examplesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder setExamples(int index, com.google.cloud.datalabeling.v1beta1.Example value) {
if (examplesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureExamplesIsMutable();
examples_.set(index, value);
onChanged();
} else {
examplesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder setExamples(
int index, com.google.cloud.datalabeling.v1beta1.Example.Builder builderForValue) {
if (examplesBuilder_ == null) {
ensureExamplesIsMutable();
examples_.set(index, builderForValue.build());
onChanged();
} else {
examplesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder addExamples(com.google.cloud.datalabeling.v1beta1.Example value) {
if (examplesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureExamplesIsMutable();
examples_.add(value);
onChanged();
} else {
examplesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder addExamples(int index, com.google.cloud.datalabeling.v1beta1.Example value) {
if (examplesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureExamplesIsMutable();
examples_.add(index, value);
onChanged();
} else {
examplesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder addExamples(
com.google.cloud.datalabeling.v1beta1.Example.Builder builderForValue) {
if (examplesBuilder_ == null) {
ensureExamplesIsMutable();
examples_.add(builderForValue.build());
onChanged();
} else {
examplesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder addExamples(
int index, com.google.cloud.datalabeling.v1beta1.Example.Builder builderForValue) {
if (examplesBuilder_ == null) {
ensureExamplesIsMutable();
examples_.add(index, builderForValue.build());
onChanged();
} else {
examplesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder addAllExamples(
java.lang.Iterable<? extends com.google.cloud.datalabeling.v1beta1.Example> values) {
if (examplesBuilder_ == null) {
ensureExamplesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, examples_);
onChanged();
} else {
examplesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder clearExamples() {
if (examplesBuilder_ == null) {
examples_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
examplesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public Builder removeExamples(int index) {
if (examplesBuilder_ == null) {
ensureExamplesIsMutable();
examples_.remove(index);
onChanged();
} else {
examplesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Example.Builder getExamplesBuilder(int index) {
return getExamplesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.ExampleOrBuilder getExamplesOrBuilder(int index) {
if (examplesBuilder_ == null) {
return examples_.get(index);
} else {
return examplesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public java.util.List<? extends com.google.cloud.datalabeling.v1beta1.ExampleOrBuilder>
getExamplesOrBuilderList() {
if (examplesBuilder_ != null) {
return examplesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(examples_);
}
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Example.Builder addExamplesBuilder() {
return getExamplesFieldBuilder()
.addBuilder(com.google.cloud.datalabeling.v1beta1.Example.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Example.Builder addExamplesBuilder(int index) {
return getExamplesFieldBuilder()
.addBuilder(index, com.google.cloud.datalabeling.v1beta1.Example.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of examples to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Example examples = 1;</code>
*/
public java.util.List<com.google.cloud.datalabeling.v1beta1.Example.Builder>
getExamplesBuilderList() {
return getExamplesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.Example,
com.google.cloud.datalabeling.v1beta1.Example.Builder,
com.google.cloud.datalabeling.v1beta1.ExampleOrBuilder>
getExamplesFieldBuilder() {
if (examplesBuilder_ == null) {
examplesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.Example,
com.google.cloud.datalabeling.v1beta1.Example.Builder,
com.google.cloud.datalabeling.v1beta1.ExampleOrBuilder>(
examples_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
examples_ = null;
}
return examplesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.ListExamplesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListExamplesResponse)
private static final com.google.cloud.datalabeling.v1beta1.ListExamplesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.ListExamplesResponse();
}
public static com.google.cloud.datalabeling.v1beta1.ListExamplesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListExamplesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListExamplesResponse>() {
@java.lang.Override
public ListExamplesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListExamplesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListExamplesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListExamplesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,079 | java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/UpdateOrganizationSettingsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v1;
/**
*
*
* <pre>
* Request message for updating an organization's settings.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest}
*/
public final class UpdateOrganizationSettingsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest)
UpdateOrganizationSettingsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateOrganizationSettingsRequest.newBuilder() to construct.
private UpdateOrganizationSettingsRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateOrganizationSettingsRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateOrganizationSettingsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1_UpdateOrganizationSettingsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1_UpdateOrganizationSettingsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest.class,
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest.Builder.class);
}
private int bitField0_;
public static final int ORGANIZATION_SETTINGS_FIELD_NUMBER = 1;
private com.google.cloud.securitycenter.v1.OrganizationSettings organizationSettings_;
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the organizationSettings field is set.
*/
@java.lang.Override
public boolean hasOrganizationSettings() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The organizationSettings.
*/
@java.lang.Override
public com.google.cloud.securitycenter.v1.OrganizationSettings getOrganizationSettings() {
return organizationSettings_ == null
? com.google.cloud.securitycenter.v1.OrganizationSettings.getDefaultInstance()
: organizationSettings_;
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.securitycenter.v1.OrganizationSettingsOrBuilder
getOrganizationSettingsOrBuilder() {
return organizationSettings_ == null
? com.google.cloud.securitycenter.v1.OrganizationSettings.getDefaultInstance()
: organizationSettings_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getOrganizationSettings());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(1, getOrganizationSettings());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest)) {
return super.equals(obj);
}
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest other =
(com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest) obj;
if (hasOrganizationSettings() != other.hasOrganizationSettings()) return false;
if (hasOrganizationSettings()) {
if (!getOrganizationSettings().equals(other.getOrganizationSettings())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasOrganizationSettings()) {
hash = (37 * hash) + ORGANIZATION_SETTINGS_FIELD_NUMBER;
hash = (53 * hash) + getOrganizationSettings().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for updating an organization's settings.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest)
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1_UpdateOrganizationSettingsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1_UpdateOrganizationSettingsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest.class,
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest.Builder.class);
}
// Construct using
// com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getOrganizationSettingsFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
organizationSettings_ = null;
if (organizationSettingsBuilder_ != null) {
organizationSettingsBuilder_.dispose();
organizationSettingsBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securitycenter.v1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1_UpdateOrganizationSettingsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
getDefaultInstanceForType() {
return com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest build() {
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest buildPartial() {
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest result =
new com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.organizationSettings_ =
organizationSettingsBuilder_ == null
? organizationSettings_
: organizationSettingsBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest) {
return mergeFrom(
(com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest other) {
if (other
== com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
.getDefaultInstance()) return this;
if (other.hasOrganizationSettings()) {
mergeOrganizationSettings(other.getOrganizationSettings());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getOrganizationSettingsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.securitycenter.v1.OrganizationSettings organizationSettings_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securitycenter.v1.OrganizationSettings,
com.google.cloud.securitycenter.v1.OrganizationSettings.Builder,
com.google.cloud.securitycenter.v1.OrganizationSettingsOrBuilder>
organizationSettingsBuilder_;
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the organizationSettings field is set.
*/
public boolean hasOrganizationSettings() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The organizationSettings.
*/
public com.google.cloud.securitycenter.v1.OrganizationSettings getOrganizationSettings() {
if (organizationSettingsBuilder_ == null) {
return organizationSettings_ == null
? com.google.cloud.securitycenter.v1.OrganizationSettings.getDefaultInstance()
: organizationSettings_;
} else {
return organizationSettingsBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setOrganizationSettings(
com.google.cloud.securitycenter.v1.OrganizationSettings value) {
if (organizationSettingsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
organizationSettings_ = value;
} else {
organizationSettingsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setOrganizationSettings(
com.google.cloud.securitycenter.v1.OrganizationSettings.Builder builderForValue) {
if (organizationSettingsBuilder_ == null) {
organizationSettings_ = builderForValue.build();
} else {
organizationSettingsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeOrganizationSettings(
com.google.cloud.securitycenter.v1.OrganizationSettings value) {
if (organizationSettingsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& organizationSettings_ != null
&& organizationSettings_
!= com.google.cloud.securitycenter.v1.OrganizationSettings.getDefaultInstance()) {
getOrganizationSettingsBuilder().mergeFrom(value);
} else {
organizationSettings_ = value;
}
} else {
organizationSettingsBuilder_.mergeFrom(value);
}
if (organizationSettings_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearOrganizationSettings() {
bitField0_ = (bitField0_ & ~0x00000001);
organizationSettings_ = null;
if (organizationSettingsBuilder_ != null) {
organizationSettingsBuilder_.dispose();
organizationSettingsBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.securitycenter.v1.OrganizationSettings.Builder
getOrganizationSettingsBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getOrganizationSettingsFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.securitycenter.v1.OrganizationSettingsOrBuilder
getOrganizationSettingsOrBuilder() {
if (organizationSettingsBuilder_ != null) {
return organizationSettingsBuilder_.getMessageOrBuilder();
} else {
return organizationSettings_ == null
? com.google.cloud.securitycenter.v1.OrganizationSettings.getDefaultInstance()
: organizationSettings_;
}
}
/**
*
*
* <pre>
* Required. The organization settings resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securitycenter.v1.OrganizationSettings,
com.google.cloud.securitycenter.v1.OrganizationSettings.Builder,
com.google.cloud.securitycenter.v1.OrganizationSettingsOrBuilder>
getOrganizationSettingsFieldBuilder() {
if (organizationSettingsBuilder_ == null) {
organizationSettingsBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securitycenter.v1.OrganizationSettings,
com.google.cloud.securitycenter.v1.OrganizationSettings.Builder,
com.google.cloud.securitycenter.v1.OrganizationSettingsOrBuilder>(
getOrganizationSettings(), getParentForChildren(), isClean());
organizationSettings_ = null;
}
return organizationSettingsBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The FieldMask to use when updating the settings resource.
*
* If empty all mutable fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest)
private static final com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest();
}
public static com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateOrganizationSettingsRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateOrganizationSettingsRequest>() {
@java.lang.Override
public UpdateOrganizationSettingsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateOrganizationSettingsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateOrganizationSettingsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,090 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/QuestionAnsweringRelevanceInput.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Input for question answering relevance metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput}
*/
public final class QuestionAnsweringRelevanceInput extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput)
    QuestionAnsweringRelevanceInputOrBuilder {
  // NOTE(review): this class is protoc-generated from
  // google/cloud/aiplatform/v1/evaluation_service.proto. Do not hand-edit the
  // logic; change the .proto and regenerate instead.
  private static final long serialVersionUID = 0L;

  // Use QuestionAnsweringRelevanceInput.newBuilder() to construct.
  private QuestionAnsweringRelevanceInput(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private QuestionAnsweringRelevanceInput() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new QuestionAnsweringRelevanceInput();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringRelevanceInput_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringRelevanceInput_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput.class,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput.Builder.class);
  }

  // Presence bits for the message's singular message fields:
  // bit 0 (0x00000001) = metric_spec is set, bit 1 (0x00000002) = instance is set.
  private int bitField0_;
  public static final int METRIC_SPEC_FIELD_NUMBER = 1;
  private com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metricSpec_;

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering relevance score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the metricSpec field is set.
   */
  @java.lang.Override
  public boolean hasMetricSpec() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering relevance score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The metricSpec.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec getMetricSpec() {
    // Never returns null: falls back to the default instance when unset.
    return metricSpec_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.getDefaultInstance()
        : metricSpec_;
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering relevance score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpecOrBuilder
      getMetricSpecOrBuilder() {
    return metricSpec_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.getDefaultInstance()
        : metricSpec_;
  }

  public static final int INSTANCE_FIELD_NUMBER = 2;
  private com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance_;

  /**
   *
   *
   * <pre>
   * Required. Question answering relevance instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the instance field is set.
   */
  @java.lang.Override
  public boolean hasInstance() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering relevance instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The instance.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance getInstance() {
    // Never returns null: falls back to the default instance when unset.
    return instance_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.getDefaultInstance()
        : instance_;
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering relevance instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstanceOrBuilder
      getInstanceOrBuilder() {
    return instance_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.getDefaultInstance()
        : instance_;
  }

  // Cached result of isInitialized(): -1 = not computed yet, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No proto2-required fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Only serialize fields whose presence bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getMetricSpec());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getInstance());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed wire size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput other =
        (com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput) obj;

    // Fields are compared only when set on both sides; presence itself must match.
    if (hasMetricSpec() != other.hasMetricSpec()) return false;
    if (hasMetricSpec()) {
      if (!getMetricSpec().equals(other.getMetricSpec())) return false;
    }
    if (hasInstance() != other.hasInstance()) return false;
    if (hasInstance()) {
      if (!getInstance().equals(other.getInstance())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode caches the hash; 0 means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasMetricSpec()) {
      hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER;
      hash = (53 * hash) + getMetricSpec().hashCode();
    }
    if (hasInstance()) {
      hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
      hash = (53 * hash) + getInstance().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parseFrom(...) overloads covering the standard protobuf input types.
  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; anything else is copied in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Input for question answering relevance metric.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput)
      com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInputOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringRelevanceInput_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringRelevanceInput_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput.class,
              com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create sub-field builders when the runtime requests it
      // (needed for nested-builder change notification).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getMetricSpecFieldBuilder();
        getInstanceFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      metricSpec_ = null;
      if (metricSpecBuilder_ != null) {
        metricSpecBuilder_.dispose();
        metricSpecBuilder_ = null;
      }
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringRelevanceInput_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput build() {
      com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput buildPartial() {
      com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput result =
          new com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields (and their presence bits) from the builder to the message.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput) {
        return mergeFrom((com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput.getDefaultInstance())
        return this;
      if (other.hasMetricSpec()) {
        mergeMetricSpec(other.getMetricSpec());
      }
      if (other.hasInstance()) {
        mergeInstance(other.getInstance());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (metric_spec), wire type 2: length-delimited message.
                input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (instance), wire type 2: length-delimited message.
                input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder-side presence bits; mirrors the message's bitField0_ layout.
    private int bitField0_;

    private com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metricSpec_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpecOrBuilder>
        metricSpecBuilder_;

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the metricSpec field is set.
     */
    public boolean hasMetricSpec() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The metricSpec.
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec getMetricSpec() {
      if (metricSpecBuilder_ == null) {
        return metricSpec_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.getDefaultInstance()
            : metricSpec_;
      } else {
        return metricSpecBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setMetricSpec(
        com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec value) {
      if (metricSpecBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        metricSpec_ = value;
      } else {
        metricSpecBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setMetricSpec(
        com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.Builder builderForValue) {
      if (metricSpecBuilder_ == null) {
        metricSpec_ = builderForValue.build();
      } else {
        metricSpecBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeMetricSpec(
        com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec value) {
      if (metricSpecBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just adopt the new value.
        if (((bitField0_ & 0x00000001) != 0)
            && metricSpec_ != null
            && metricSpec_
                != com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec
                    .getDefaultInstance()) {
          getMetricSpecBuilder().mergeFrom(value);
        } else {
          metricSpec_ = value;
        }
      } else {
        metricSpecBuilder_.mergeFrom(value);
      }
      if (metricSpec_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearMetricSpec() {
      bitField0_ = (bitField0_ & ~0x00000001);
      metricSpec_ = null;
      if (metricSpecBuilder_ != null) {
        metricSpecBuilder_.dispose();
        metricSpecBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.Builder
        getMetricSpecBuilder() {
      // Getting the sub-builder counts as setting the field.
      bitField0_ |= 0x00000001;
      onChanged();
      return getMetricSpecFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpecOrBuilder
        getMetricSpecOrBuilder() {
      if (metricSpecBuilder_ != null) {
        return metricSpecBuilder_.getMessageOrBuilder();
      } else {
        return metricSpec_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.getDefaultInstance()
            : metricSpec_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering relevance score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpecOrBuilder>
        getMetricSpecFieldBuilder() {
      // Lazily creates the sub-field builder; ownership of the raw field moves into it.
      if (metricSpecBuilder_ == null) {
        metricSpecBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec,
                com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpec.Builder,
                com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceSpecOrBuilder>(
                getMetricSpec(), getParentForChildren(), isClean());
        metricSpec_ = null;
      }
      return metricSpecBuilder_;
    }

    private com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstanceOrBuilder>
        instanceBuilder_;

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the instance field is set.
     */
    public boolean hasInstance() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The instance.
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance getInstance() {
      if (instanceBuilder_ == null) {
        return instance_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.getDefaultInstance()
            : instance_;
      } else {
        return instanceBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(
        com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance value) {
      if (instanceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        instance_ = value;
      } else {
        instanceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(
        com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.Builder builderForValue) {
      if (instanceBuilder_ == null) {
        instance_ = builderForValue.build();
      } else {
        instanceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeInstance(
        com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance value) {
      if (instanceBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just adopt the new value.
        if (((bitField0_ & 0x00000002) != 0)
            && instance_ != null
            && instance_
                != com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance
                    .getDefaultInstance()) {
          getInstanceBuilder().mergeFrom(value);
        } else {
          instance_ = value;
        }
      } else {
        instanceBuilder_.mergeFrom(value);
      }
      if (instance_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearInstance() {
      bitField0_ = (bitField0_ & ~0x00000002);
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.Builder
        getInstanceBuilder() {
      // Getting the sub-builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getInstanceFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstanceOrBuilder
        getInstanceOrBuilder() {
      if (instanceBuilder_ != null) {
        return instanceBuilder_.getMessageOrBuilder();
      } else {
        return instance_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.getDefaultInstance()
            : instance_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering relevance instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstanceOrBuilder>
        getInstanceFieldBuilder() {
      // Lazily creates the sub-field builder; ownership of the raw field moves into it.
      if (instanceBuilder_ == null) {
        instanceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance,
                com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstance.Builder,
                com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInstanceOrBuilder>(
                getInstance(), getParentForChildren(), isClean());
        instance_ = null;
      }
      return instanceBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput)
  // Singleton default (empty) instance shared by all callers.
  private static final com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput();
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser shared by all parseFrom overloads; partial parse failures
  // attach the partially-built message to the thrown exception.
  private static final com.google.protobuf.Parser<QuestionAnsweringRelevanceInput> PARSER =
      new com.google.protobuf.AbstractParser<QuestionAnsweringRelevanceInput>() {
        @java.lang.Override
        public QuestionAnsweringRelevanceInput parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<QuestionAnsweringRelevanceInput> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<QuestionAnsweringRelevanceInput> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringRelevanceInput
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ofbiz-framework | 36,211 | framework/entityext/src/main/java/org/apache/ofbiz/entityext/synchronization/EntitySyncServices.java | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.entityext.synchronization;
import static org.apache.ofbiz.base.util.UtilGenerics.checkCollection;
import java.io.IOException;
import java.net.URL;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.UtilGenerics;
import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.base.util.UtilProperties;
import org.apache.ofbiz.base.util.UtilURL;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.base.util.UtilXml;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.DelegatorFactory;
import org.apache.ofbiz.entity.GenericEntity;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.condition.EntityCondition;
import org.apache.ofbiz.entity.condition.EntityOperator;
import org.apache.ofbiz.entity.model.ModelEntity;
import org.apache.ofbiz.entity.serialize.SerializeException;
import org.apache.ofbiz.entity.serialize.XmlSerializer;
import org.apache.ofbiz.entity.util.EntityQuery;
import org.apache.ofbiz.entityext.synchronization.EntitySyncContext.SyncAbortException;
import org.apache.ofbiz.entityext.synchronization.EntitySyncContext.SyncErrorException;
import org.apache.ofbiz.service.DispatchContext;
import org.apache.ofbiz.service.GenericServiceException;
import org.apache.ofbiz.service.LocalDispatcher;
import org.apache.ofbiz.service.ServiceUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
import com.ibm.icu.util.Calendar;
/**
* Entity Engine Sync Services
*/
public class EntitySyncServices {

    private static final String MODULE = EntitySyncServices.class.getName();
    private static final String RESOURCE = "EntityExtUiLabels";

    /**
     * Run an Entity Sync push (checks to see if another sync is already running, etc.).
     * Loops over time splits, assembling creates/stores/removes for each split and
     * pushing them to the remote side via the EntitySyncContext.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with the result of the service, the output parameters
     */
    public static Map<String, Object> runEntitySync(DispatchContext dctx, Map<String, ? extends Object> context) {
        Locale locale = (Locale) context.get("locale");
        EntitySyncContext esc = null;
        try {
            esc = new EntitySyncContext(dctx, context);
            // a pull-only EntitySync must not be pushed
            if ("Y".equals(esc.getEntitySync().get("forPullOnly"))) {
                return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtCannotDoEntitySyncPush", locale));
            }
            esc.runPushStartRunning();
            // increment starting time to run until now
            esc.setSplitStartTime(); // just run this the first time, will be updated between each loop automatically
            while (esc.hasMoreTimeToSync()) {
                // this will result in lots of log messages, so leaving commented out unless needed/wanted later
                // Debug.logInfo("Doing runEntitySync split, currentRunStartTime=" + esc.currentRunStartTime + ", currentRunEndTime="
                // + esc.currentRunEndTime, MODULE);
                esc.setTotalSplits(esc.getTotalSplits() + 1);
                // tx times are indexed
                // keep track of how long these sync runs take and store that info on the history table
                // saves info about removed, all entities that don't have no-auto-stamp set, this will be done in the GenericDAO like the stamp sets
                // ===== INSERTS =====
                ArrayList<GenericValue> valuesToCreate = esc.assembleValuesToCreate();
                // ===== UPDATES =====
                ArrayList<GenericValue> valuesToStore = esc.assembleValuesToStore();
                // ===== DELETES =====
                List<GenericEntity> keysToRemove = esc.assembleKeysToRemove();
                esc.runPushSendData(valuesToCreate, valuesToStore, keysToRemove);
                esc.saveResultsReportedFromDataStore();
                esc.advanceRunTimes();
            }
            esc.saveFinalSyncResults();
        } catch (SyncAbortException e) {
            return e.returnError(MODULE);
        } catch (SyncErrorException e) {
            e.saveSyncErrorInfo(esc);
            return e.returnError(MODULE);
        }
        return ServiceUtil.returnSuccess();
    }

    /**
     * Store Entity Sync Data: applies incoming creates, stores and removes to the local
     * (or overridden) delegator, using timestamp comparison to avoid overwriting newer data.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with the result of the service, the output parameters (counts of
     *         inserted/updated/skipped/removed rows)
     */
    public static Map<String, Object> storeEntitySyncData(DispatchContext dctx, Map<String, Object> context) {
        Delegator delegator = dctx.getDelegator();
        String overrideDelegatorName = (String) context.get("delegatorName");
        Locale locale = (Locale) context.get("locale");
        if (UtilValidate.isNotEmpty(overrideDelegatorName)) {
            delegator = DelegatorFactory.getDelegator(overrideDelegatorName);
            if (delegator == null) {
                return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtCannotFindDelegator",
                        UtilMisc.toMap("overrideDelegatorName", overrideDelegatorName), locale));
            }
        }
        //LocalDispatcher dispatcher = dctx.getDispatcher();
        String entitySyncId = (String) context.get("entitySyncId");
        // incoming lists will already be sorted by lastUpdatedStamp (or lastCreatedStamp)
        List<GenericValue> valuesToCreate = UtilGenerics.cast(context.get("valuesToCreate"));
        List<GenericValue> valuesToStore = UtilGenerics.cast(context.get("valuesToStore"));
        List<GenericEntity> keysToRemove = UtilGenerics.cast(context.get("keysToRemove"));
        if (Debug.infoOn()) {
            Debug.logInfo("Running storeEntitySyncData (" + entitySyncId + ") - [" + valuesToCreate.size() + "] to create; [" + valuesToStore.size()
                    + "] to store; [" + keysToRemove.size() + "] to remove.", MODULE);
        }
        try {
            long toCreateInserted = 0;
            long toCreateUpdated = 0;
            long toCreateNotUpdated = 0;
            long toStoreInserted = 0;
            long toStoreUpdated = 0;
            long toStoreNotUpdated = 0;
            long toRemoveDeleted = 0;
            long toRemoveAlreadyDeleted = 0;
            // create all values in the valuesToCreate List; if the value already exists update it, or if exists and was updated more recently
            // than this one dont update it
            for (GenericValue valueToCreate : valuesToCreate) {
                // to Create check if exists (find by pk), if not insert; if exists check lastUpdatedStamp: if null or before the candidate value
                // insert, otherwise don't insert
                // NOTE: use the delegator from this DispatchContext rather than the one named in the GenericValue
                // maintain the original timestamps when doing storage of synced data, by default with will update the timestamps to now
                valueToCreate.setIsFromEntitySync(true);
                // check to make sure all foreign keys are created; if not create dummy values as place holders
                valueToCreate.checkFks(true);
                GenericValue existingValue = EntityQuery.use(delegator)
                        .from(valueToCreate.getEntityName())
                        .where(valueToCreate.getPrimaryKey())
                        .queryOne();
                if (existingValue == null) {
                    delegator.create(valueToCreate);
                    toCreateInserted++;
                } else {
                    // if the existing value has a stamp field that is AFTER the stamp on the valueToCreate, don't update it
                    if (existingValue.get(ModelEntity.STAMP_FIELD) != null && existingValue.getTimestamp(ModelEntity.STAMP_FIELD)
                            .after(valueToCreate.getTimestamp(ModelEntity.STAMP_FIELD))) {
                        toCreateNotUpdated++;
                    } else {
                        delegator.store(valueToCreate);
                        toCreateUpdated++;
                    }
                }
            }
            // iterate through to store list and store each
            for (GenericValue valueToStore : valuesToStore) {
                // to store check if exists (find by pk), if not insert; if exists check lastUpdatedStamp: if null or before the candidate value
                // insert, otherwise don't insert
                // maintain the original timestamps when doing storage of synced data, by default with will update the timestamps to now
                valueToStore.setIsFromEntitySync(true);
                // check to make sure all foreign keys are created; if not create dummy values as place holders
                valueToStore.checkFks(true);
                GenericValue existingValue = EntityQuery.use(delegator)
                        .from(valueToStore.getEntityName())
                        .where(valueToStore.getPrimaryKey())
                        .queryOne();
                if (existingValue == null) {
                    delegator.create(valueToStore);
                    toStoreInserted++;
                } else {
                    // if the existing value has a stamp field that is AFTER the stamp on the valueToStore, don't update it
                    if (existingValue.get(ModelEntity.STAMP_FIELD) != null && existingValue.getTimestamp(ModelEntity.STAMP_FIELD)
                            .after(valueToStore.getTimestamp(ModelEntity.STAMP_FIELD))) {
                        toStoreNotUpdated++;
                    } else {
                        delegator.store(valueToStore);
                        toStoreUpdated++;
                    }
                }
            }
            // iterate through to remove list and remove each
            for (GenericEntity pkToRemove : keysToRemove) {
                // check to see if it exists, if so remove and count, if not just count already removed
                // always do a removeByAnd, if it was a removeByAnd great, if it was a removeByPrimaryKey, this will also work and save us a query
                pkToRemove.setIsFromEntitySync(true);
                // remove the stamp fields inserted by EntitySyncContext.java at or near line 646
                pkToRemove.remove(ModelEntity.STAMP_TX_FIELD);
                pkToRemove.remove(ModelEntity.STAMP_FIELD);
                pkToRemove.remove(ModelEntity.CREATE_STAMP_TX_FIELD);
                pkToRemove.remove(ModelEntity.CREATE_STAMP_FIELD);
                int numRemByAnd = delegator.removeByAnd(pkToRemove.getEntityName(), pkToRemove);
                if (numRemByAnd == 0) {
                    toRemoveAlreadyDeleted++;
                } else {
                    toRemoveDeleted++;
                }
            }
            Map<String, Object> result = ServiceUtil.returnSuccess();
            result.put("toCreateInserted", toCreateInserted);
            result.put("toCreateUpdated", toCreateUpdated);
            result.put("toCreateNotUpdated", toCreateNotUpdated);
            result.put("toStoreInserted", toStoreInserted);
            result.put("toStoreUpdated", toStoreUpdated);
            result.put("toStoreNotUpdated", toStoreNotUpdated);
            result.put("toRemoveDeleted", toRemoveDeleted);
            result.put("toRemoveAlreadyDeleted", toRemoveAlreadyDeleted);
            if (Debug.infoOn()) {
                // fixed typo in log message: "Finisching" -> "Finishing"
                Debug.logInfo("Finishing storeEntitySyncData (" + entitySyncId + ") - [" + keysToRemove.size() + "] to remove. Actually removed: "
                        + toRemoveDeleted + " already removed: " + toRemoveAlreadyDeleted, MODULE);
            }
            return result;
        } catch (GenericEntityException e) {
            Debug.logError(e, "Exception saving Entity Sync Data for entitySyncId [" + entitySyncId + "]: " + e.toString(), MODULE);
            return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtExceptionSavingEntitySyncData",
                    UtilMisc.toMap("entitySyncId", entitySyncId, "errorString", e.toString()), locale));
        } catch (Throwable t) {
            Debug.logError(t, "Error saving Entity Sync Data for entitySyncId [" + entitySyncId + "]: " + t.toString(), MODULE);
            return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtErrorSavingEntitySyncData",
                    UtilMisc.toMap("entitySyncId", entitySyncId, "errorString", t.toString()), locale));
        }
    }

    /**
     * Run Pull Entity Sync - Pull From Remote. Repeatedly calls the remote
     * pullAndReportEntitySyncData service, storing each returned batch locally via
     * storeEntitySyncData and reporting the store results back on the next call,
     * until the remote side returns no more data.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with the result of the service, the output parameters
     */
    public static Map<String, Object> runPullEntitySync(DispatchContext dctx, Map<String, Object> context) {
        LocalDispatcher dispatcher = dctx.getDispatcher();
        Locale locale = (Locale) context.get("locale");
        String entitySyncId = (String) context.get("entitySyncId");
        String remotePullAndReportEntitySyncDataName = (String) context.get("remotePullAndReportEntitySyncDataName");
        Debug.logInfo("Running runPullEntitySync for entitySyncId=" + context.get("entitySyncId"), MODULE);
        // loop until no data is returned to store
        boolean gotMoreData = true;
        Timestamp startDate = null;
        Long toCreateInserted = null;
        Long toCreateUpdated = null;
        Long toCreateNotUpdated = null;
        Long toStoreInserted = null;
        Long toStoreUpdated = null;
        Long toStoreNotUpdated = null;
        Long toRemoveDeleted = null;
        Long toRemoveAlreadyDeleted = null;
        while (gotMoreData) {
            gotMoreData = false;
            // call pullAndReportEntitySyncData, initially with no results, then with results from last loop
            Map<String, Object> remoteCallContext = new HashMap<>();
            remoteCallContext.put("entitySyncId", entitySyncId);
            remoteCallContext.put("delegatorName", context.get("remoteDelegatorName"));
            remoteCallContext.put("userLogin", context.get("userLogin"));
            remoteCallContext.put("startDate", startDate);
            remoteCallContext.put("toCreateInserted", toCreateInserted);
            remoteCallContext.put("toCreateUpdated", toCreateUpdated);
            remoteCallContext.put("toCreateNotUpdated", toCreateNotUpdated);
            remoteCallContext.put("toStoreInserted", toStoreInserted);
            remoteCallContext.put("toStoreUpdated", toStoreUpdated);
            remoteCallContext.put("toStoreNotUpdated", toStoreNotUpdated);
            remoteCallContext.put("toRemoveDeleted", toRemoveDeleted);
            remoteCallContext.put("toRemoveAlreadyDeleted", toRemoveAlreadyDeleted);
            try {
                Map<String, Object> result = dispatcher.runSync(remotePullAndReportEntitySyncDataName, remoteCallContext);
                if (ServiceUtil.isError(result)) {
                    return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtErrorCallingRemotePull",
                            UtilMisc.toMap("remotePullAndReportEntitySyncDataName", remotePullAndReportEntitySyncDataName), locale), null, null,
                            result);
                }
                startDate = (Timestamp) result.get("startDate");
                try {
                    // store data returned, get results (just call storeEntitySyncData locally, get the numbers back and boom shakalaka)
                    // anything to store locally?
                    if (startDate != null && (UtilValidate.isNotEmpty(result.get("valuesToCreate"))
                            || UtilValidate.isNotEmpty(result.get("valuesToStore"))
                            || UtilValidate.isNotEmpty(result.get("keysToRemove")))) {
                        // yep, we got more data
                        gotMoreData = true;
                        // at least one of the is not empty, make sure none of them are null now too...
                        List<GenericValue> valuesToCreate = checkCollection(result.get("valuesToCreate"), GenericValue.class);
                        if (valuesToCreate == null) {
                            valuesToCreate = Collections.emptyList();
                        }
                        List<GenericValue> valuesToStore = checkCollection(result.get("valuesToStore"), GenericValue.class);
                        if (valuesToStore == null) {
                            valuesToStore = Collections.emptyList();
                        }
                        List<GenericEntity> keysToRemove = checkCollection(result.get("keysToRemove"), GenericEntity.class);
                        if (keysToRemove == null) {
                            keysToRemove = Collections.emptyList();
                        }
                        Map<String, Object> callLocalStoreContext = UtilMisc.toMap("entitySyncId", entitySyncId, "delegatorName",
                                context.get("localDelegatorName"),
                                "valuesToCreate", valuesToCreate, "valuesToStore", valuesToStore,
                                "keysToRemove", keysToRemove);
                        callLocalStoreContext.put("userLogin", context.get("userLogin"));
                        Map<String, Object> storeResult = dispatcher.runSync("storeEntitySyncData", callLocalStoreContext);
                        if (ServiceUtil.isError(storeResult)) {
                            return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtErrorCallingService", locale),
                                    null, null, storeResult);
                        }
                        // get results for next pass
                        toCreateInserted = (Long) storeResult.get("toCreateInserted");
                        toCreateUpdated = (Long) storeResult.get("toCreateUpdated");
                        toCreateNotUpdated = (Long) storeResult.get("toCreateNotUpdated");
                        toStoreInserted = (Long) storeResult.get("toStoreInserted");
                        toStoreUpdated = (Long) storeResult.get("toStoreUpdated");
                        toStoreNotUpdated = (Long) storeResult.get("toStoreNotUpdated");
                        toRemoveDeleted = (Long) storeResult.get("toRemoveDeleted");
                        toRemoveAlreadyDeleted = (Long) storeResult.get("toRemoveAlreadyDeleted");
                    }
                } catch (GenericServiceException e) {
                    Debug.logError(e, "Error calling service to store data locally: " + e.toString(), MODULE);
                    return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtErrorCallingService", locale) + e.toString());
                }
            } catch (GenericServiceException e) {
                Debug.logError(e, "Exception calling remote pull and report EntitySync service with name: " + remotePullAndReportEntitySyncDataName
                        + "; " + e.toString(), MODULE);
                return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtErrorCallingRemotePull",
                        UtilMisc.toMap("remotePullAndReportEntitySyncDataName", remotePullAndReportEntitySyncDataName), locale) + e.toString());
            } catch (Throwable t) {
                Debug.logError(t, "Error calling remote pull and report EntitySync service with name: " + remotePullAndReportEntitySyncDataName
                        + "; " + t.toString(), MODULE);
                return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtErrorCallingRemotePull",
                        UtilMisc.toMap("remotePullAndReportEntitySyncDataName", remotePullAndReportEntitySyncDataName), locale) + t.toString());
            }
        }
        return ServiceUtil.returnSuccess();
    }

    /**
     * Pull and Report Entity Sync Data - Called Remotely to Push Results from last pull, the Pull next set of results.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with the result of the service, the output parameters
     */
    public static Map<String, Object> pullAndReportEntitySyncData(DispatchContext dctx, Map<String, ? extends Object> context) {
        EntitySyncContext esc = null;
        Locale locale = (Locale) context.get("locale");
        try {
            esc = new EntitySyncContext(dctx, context);
            Debug.logInfo("Doing pullAndReportEntitySyncData for entitySyncId=" + esc.getEntitySyncId() + ", currentRunStartTime="
                    + esc.getCurrentRunStartTime() + ", currentRunEndTime=" + esc.getCurrentRunEndTime(), MODULE);
            // NOTE(review): this guard rejects push-only syncs from the pull service but reuses the
            // "EntityExtCannotDoEntitySyncPush" label; a pull-specific message key may be intended -- confirm against EntityExtUiLabels
            if ("Y".equals(esc.getEntitySync().get("forPushOnly"))) {
                return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtCannotDoEntitySyncPush", locale));
            }
            // Part 1: if any results are passed, store the results for the given startDate, update EntitySync, etc
            // restore info from last pull, or if no results start new run
            esc.runPullStartOrRestoreSavedResults();
            // increment starting time to run until now
            while (esc.hasMoreTimeToSync()) {
                // make sure the following message is commented out before commit:
                // Debug.logInfo("(loop)Doing pullAndReportEntitySyncData split, currentRunStartTime=" + esc.currentRunStartTime + ",
                // currentRunEndTime=" + esc.currentRunEndTime, MODULE);
                esc.setTotalSplits(esc.getTotalSplits() + 1);
                // tx times are indexed
                // keep track of how long these sync runs take and store that info on the history table
                // saves info about removed, all entities that don't have no-auto-stamp set, this will be done in the GenericDAO like the stamp sets
                // Part 2: get the next set of data for the given entitySyncId
                // Part 2a: return it back for storage but leave the EntitySyncHistory without results, and don't update the EntitySync last time
                // ===== INSERTS =====
                ArrayList<GenericValue> valuesToCreate = esc.assembleValuesToCreate();
                // ===== UPDATES =====
                ArrayList<GenericValue> valuesToStore = esc.assembleValuesToStore();
                // ===== DELETES =====
                List<GenericEntity> keysToRemove = esc.assembleKeysToRemove();
                esc.setTotalRowCounts(valuesToCreate, valuesToStore, keysToRemove);
                if (Debug.infoOn()) {
                    Debug.logInfo("Service pullAndReportEntitySyncData returning - [" + valuesToCreate.size() + "] to create; ["
                            + valuesToStore.size() + "] to store; [" + keysToRemove.size() + "] to remove; [" + esc.getTotalRowsPerSplit()
                            + "] total rows per split.", MODULE);
                }
                if (esc.getTotalRowsPerSplit() > 0) {
                    // stop if we found some data, otherwise look and try again
                    Map<String, Object> result = ServiceUtil.returnSuccess();
                    result.put("startDate", esc.getStartDate());
                    result.put("valuesToCreate", valuesToCreate);
                    result.put("valuesToStore", valuesToStore);
                    result.put("keysToRemove", keysToRemove);
                    return result;
                } else {
                    // save the progress to EntitySync and EntitySyncHistory, and move on...
                    esc.saveResultsReportedFromDataStore();
                    esc.advanceRunTimes();
                }
            }
            // if no more results from database to return, save final settings
            if (!esc.hasMoreTimeToSync()) {
                esc.saveFinalSyncResults();
            }
        } catch (SyncAbortException e) {
            return e.returnError(MODULE);
        } catch (SyncErrorException e) {
            e.saveSyncErrorInfo(esc);
            return e.returnError(MODULE);
        }
        return ServiceUtil.returnSuccess();
    }

    /**
     * Run an offline Entity Sync: assembles the sync data splits and writes them to an
     * XML file (named by the "fileName" parameter, or a generated default) instead of
     * sending them to a remote service.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with the result of the service, the output parameters
     */
    public static Map<String, Object> runOfflineEntitySync(DispatchContext dctx, Map<String, ? extends Object> context) {
        String fileName = (String) context.get("fileName");
        EntitySyncContext esc = null;
        long totalRowsExported = 0;
        try {
            esc = new EntitySyncContext(dctx, context);
            Debug.logInfo("Doing runManualEntitySync for entitySyncId=" + esc.getEntitySyncId() + ", currentRunStartTime="
                    + esc.getCurrentRunStartTime() + ", currentRunEndTime=" + esc.getCurrentRunEndTime(), MODULE);
            Document mainDoc = UtilXml.makeEmptyXmlDocument("xml-entity-synchronization");
            Element docElement = mainDoc.getDocumentElement();
            docElement.setAttribute("xml:lang", "en-US");
            esc.runOfflineStartRunning();
            // increment starting time to run until now
            esc.setSplitStartTime(); // just run this the first time, will be updated between each loop automatically
            while (esc.hasMoreTimeToSync()) {
                esc.setTotalSplits(esc.getTotalSplits() + 1);
                ArrayList<GenericValue> valuesToCreate = esc.assembleValuesToCreate();
                ArrayList<GenericValue> valuesToStore = esc.assembleValuesToStore();
                List<GenericEntity> keysToRemove = esc.assembleKeysToRemove();
                long currentRows = esc.setTotalRowCounts(valuesToCreate, valuesToStore, keysToRemove);
                totalRowsExported += currentRows;
                if (currentRows > 0) {
                    // create the XML document
                    Element syncElement = UtilXml.addChildElement(docElement, "entity-sync", mainDoc);
                    syncElement.setAttribute("entitySyncId", esc.getEntitySyncId());
                    syncElement.setAttribute("lastSuccessfulSynchTime", esc.getCurrentRunEndTime().toString());
                    // serialize the list data for XML storage
                    try {
                        UtilXml.addChildElementValue(syncElement, "values-to-create", XmlSerializer.serialize(valuesToCreate), mainDoc);
                        UtilXml.addChildElementValue(syncElement, "values-to-store", XmlSerializer.serialize(valuesToStore), mainDoc);
                        UtilXml.addChildElementValue(syncElement, "keys-to-remove", XmlSerializer.serialize(keysToRemove), mainDoc);
                    } catch (SerializeException e) {
                        throw new EntitySyncContext.SyncOtherErrorException("List serialization problem", e);
                    } catch (IOException e) {
                        throw new EntitySyncContext.SyncOtherErrorException("XML writing problem", e);
                    }
                }
                // update the result info
                esc.runSaveOfflineSyncInfo(currentRows);
                esc.advanceRunTimes();
            }
            if (totalRowsExported > 0) {
                // check the file name; use a default if none is passed in
                if (UtilValidate.isEmpty(fileName)) {
                    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
                    fileName = "offline_entitySync-" + esc.getEntitySyncId() + "-" + sdf.format(new Date()) + ".xml";
                }
                // write the XML file
                try {
                    UtilXml.writeXmlDocument(fileName, mainDoc);
                } catch (java.io.IOException e) {
                    throw new EntitySyncContext.SyncOtherErrorException(e);
                }
            } else {
                Debug.logInfo("No rows to write; no data exported.", MODULE);
            }
            // save the final results
            esc.saveFinalSyncResults();
        } catch (SyncAbortException e) {
            return e.returnError(MODULE);
        } catch (SyncErrorException e) {
            e.saveSyncErrorInfo(esc);
            return e.returnError(MODULE);
        }
        return ServiceUtil.returnSuccess();
    }

    /**
     * Load offline sync data from an XML file (resolved via the "xmlFileName" parameter
     * as a classpath/URL resource) and store it locally by calling storeEntitySyncData
     * for each entity-sync element found.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with the result of the service, the output parameters
     */
    public static Map<String, Object> loadOfflineSyncData(DispatchContext dctx, Map<String, ? extends Object> context) {
        LocalDispatcher dispatcher = dctx.getDispatcher();
        Delegator delegator = dctx.getDelegator();
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        String fileName = (String) context.get("xmlFileName");
        Locale locale = (Locale) context.get("locale");
        URL xmlFile = UtilURL.fromResource(fileName);
        if (xmlFile != null) {
            Document xmlSyncDoc = null;
            try {
                xmlSyncDoc = UtilXml.readXmlDocument(xmlFile, false);
            } catch (SAXException | IOException | ParserConfigurationException e) {
                Debug.logError(e, MODULE);
            }
            if (xmlSyncDoc == null) {
                return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtEntitySyncXMLDocumentIsNotValid",
                        UtilMisc.toMap("fileName", fileName), locale));
            }
            List<? extends Element> syncElements = UtilXml.childElementList(xmlSyncDoc.getDocumentElement());
            if (syncElements != null) {
                for (Element entitySync: syncElements) {
                    String entitySyncId = entitySync.getAttribute("entitySyncId");
                    String startTime = entitySync.getAttribute("lastSuccessfulSynchTime");
                    String createString = UtilXml.childElementValue(entitySync, "values-to-create");
                    String storeString = UtilXml.childElementValue(entitySync, "values-to-store");
                    String removeString = UtilXml.childElementValue(entitySync, "keys-to-remove");
                    // de-serialize the value lists
                    try {
                        List<GenericValue> valuesToCreate = checkCollection(XmlSerializer.deserialize(createString, delegator), GenericValue.class);
                        List<GenericValue> valuesToStore = checkCollection(XmlSerializer.deserialize(storeString, delegator), GenericValue.class);
                        List<GenericEntity> keysToRemove = checkCollection(XmlSerializer.deserialize(removeString, delegator), GenericEntity.class);
                        Map<String, Object> storeContext = UtilMisc.toMap("entitySyncId", entitySyncId, "valuesToCreate", valuesToCreate,
                                "valuesToStore", valuesToStore, "keysToRemove", keysToRemove, "userLogin", userLogin);
                        // store the value(s)
                        Map<String, Object> storeResult = dispatcher.runSync("storeEntitySyncData", storeContext);
                        if (ServiceUtil.isError(storeResult)) {
                            throw new GenericServiceException(ServiceUtil.getErrorMessage(storeResult));
                        }
                        // TODO create a response document to send back to the initial sync machine
                    } catch (GenericServiceException | IOException | ParserConfigurationException | SAXException | SerializeException gse) {
                        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtUnableToLoadXMLDocument",
                                UtilMisc.toMap("entitySyncId", entitySyncId, "startTime", startTime, "errorString", gse.getMessage()), locale));
                    }
                }
            }
        } else {
            return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtOfflineXMLFileNotFound",
                    UtilMisc.toMap("fileName", fileName), locale));
        }
        return ServiceUtil.returnSuccess();
    }

    /**
     * Update offline Entity Sync results. Not yet implemented; always returns an error.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with an error result indicating the service is not implemented
     */
    public static Map<String, Object> updateOfflineEntitySync(DispatchContext dctx, Map<String, Object> context) {
        Locale locale = (Locale) context.get("locale");
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtThisServiceIsNotYetImplemented", locale));
    }

    /**
     * Clean EntitySyncRemove Info: deletes EntitySyncRemove records whose TX timestamp
     * is older than the largest keepRemoveInfoHours configured on any EntitySync record.
     * @param dctx The DispatchContext that this service is operating in
     * @param context Map containing the input parameters
     * @return Map with the result of the service, the output parameters
     */
    public static Map<String, Object> cleanSyncRemoveInfo(DispatchContext dctx, Map<String, ? extends Object> context) {
        Debug.logInfo("Running cleanSyncRemoveInfo", MODULE);
        Delegator delegator = dctx.getDelegator();
        Locale locale = (Locale) context.get("locale");
        try {
            // find the largest keepRemoveInfoHours value across all EntitySync records and remove everything before that;
            // if none is found, default to 24 hours
            // NOTE(review): an earlier comment claimed a 240-hour (10 day) default while the code uses 24 -- confirm intended default
            double keepRemoveInfoHours = 24;
            List<GenericValue> entitySyncRemoveList = EntityQuery.use(delegator).from("EntitySync").queryList();
            for (GenericValue entitySyncRemove: entitySyncRemoveList) {
                Double curKrih = entitySyncRemove.getDouble("keepRemoveInfoHours");
                if (curKrih != null) {
                    double curKrihVal = curKrih;
                    if (curKrihVal > keepRemoveInfoHours) {
                        keepRemoveInfoHours = curKrihVal;
                    }
                }
            }
            int keepSeconds = (int) Math.floor(keepRemoveInfoHours * 3600);
            Calendar nowCal = Calendar.getInstance();
            nowCal.setTimeInMillis(System.currentTimeMillis());
            nowCal.add(Calendar.SECOND, -keepSeconds);
            Timestamp keepAfterStamp = new Timestamp(nowCal.getTimeInMillis());
            int numRemoved = delegator.removeByCondition("EntitySyncRemove", EntityCondition.makeCondition(ModelEntity.STAMP_TX_FIELD,
                    EntityOperator.LESS_THAN, keepAfterStamp));
            Debug.logInfo("In cleanSyncRemoveInfo removed [" + numRemoved + "] values with TX timestamp before [" + keepAfterStamp + "]", MODULE);
            return ServiceUtil.returnSuccess();
        } catch (GenericEntityException e) {
            Debug.logError(e, "Error cleaning out EntitySyncRemove info: " + e.toString(), MODULE);
            return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE, "EntityExtErrorCleaningEntitySyncRemove",
                    UtilMisc.toMap("errorString", e.toString()), locale));
        }
    }
}
|
apache/tika | 35,448 | tika-parsers/tika-parsers-standard/tika-parsers-standard-modules/tika-parser-microsoft-module/src/test/java/org/apache/tika/parser/microsoft/ooxml/SXWPFExtractorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tika.parser.microsoft.ooxml;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.tika.TikaTest;
import org.apache.tika.config.TikaConfig;
import org.apache.tika.exception.EncryptedDocumentException;
import org.apache.tika.metadata.DublinCore;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.metadata.Office;
import org.apache.tika.metadata.OfficeOpenXMLCore;
import org.apache.tika.metadata.OfficeOpenXMLExtended;
import org.apache.tika.metadata.TikaCoreProperties;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.PasswordProvider;
import org.apache.tika.parser.microsoft.OfficeParserConfig;
public class SXWPFExtractorTest extends TikaTest {
private ParseContext parseContext;
@BeforeEach
public void setUp() {
parseContext = new ParseContext();
OfficeParserConfig officeParserConfig = new OfficeParserConfig();
officeParserConfig.setUseSAXDocxExtractor(true);
officeParserConfig.setUseSAXPptxExtractor(true);
parseContext.set(OfficeParserConfig.class, officeParserConfig);
}
    /**
     * Broad smoke test of the SAX-based docx extractor against testWORD_2006ml.docx:
     * checks core/extended metadata on the main document, then a wide range of XHTML
     * content features (TOC, hyperlinks, tables, shapes, SDT content controls,
     * word art, notes, headers/footers, tracked changes).
     */
    @Test
    public void basicTest() throws Exception {
        List<Metadata> metadataList = getRecursiveMetadata("testWORD_2006ml.docx", parseContext);
        // main document plus embedded documents/attachments
        assertEquals(8, metadataList.size());
        Metadata m = metadataList.get(0);
        assertEquals("2016-11-29T00:58:00Z", m.get(TikaCoreProperties.CREATED));
        assertEquals("2016-11-29T17:54:00Z", m.get(TikaCoreProperties.MODIFIED));
        assertEquals("My Document Title", m.get(TikaCoreProperties.TITLE));
        assertEquals("This is the Author", m.get(TikaCoreProperties.CREATOR));
        assertEquals("3", m.get(OfficeOpenXMLCore.REVISION));
        assertEquals("Allison, Timothy B.", m.get(TikaCoreProperties.MODIFIER));
        //assertEquals("0", m.get(OfficeOpenXMLExtended.DOC_SECURITY));
        assertEquals("260", m.get(Office.WORD_COUNT));
        assertEquals("3", m.get(Office.PARAGRAPH_COUNT));
        assertEquals("1742", m.get(Office.CHARACTER_COUNT_WITH_SPACES));
        assertEquals("12", m.get(Office.LINE_COUNT));
        assertEquals("16.0000", m.get(OfficeOpenXMLExtended.APP_VERSION));
        String content = m.get(TikaCoreProperties.TIKA_CONTENT);
        // counts of 1 guard against duplicated extraction of the same run
        assertContainsCount("engaging title page", content, 1);
        //need \n to differentiate from metadata values
        assertContainsCount("This is the Author\n", content, 1);
        assertContainsCount("This is an engaging title page", content, 1);
        assertContains("My Document Title", content);
        assertContains("My Document Subtitle", content);
        // table-of-contents entry rendered as a styled paragraph with anchor link
        assertContains("<p class=\"toc_1\">\t<a href=\"#_Toc467647605\">Heading1\t3</a></p>",
                content);
        assertContains("2. Really basic 2.", content);
        assertContainsCount("This is a text box", content, 1);
        assertContains("<p>This is a hyperlink: <a href=\"http://tika.apache.org\">tika</a></p>",
                content);
        assertContains(
                "<p>This is a link to a local file: <a href=\"file:///C:/data/test.png\">test.png</a></p>",
                content);
        assertContains("<p>This is 10 spaces</p>", content);
        //caption
        assertContains(
                "<p class=\"table_of_figures\">\t<a href=\"#_Toc467647797\">Table 1: Table1 Caption\t2</a></p>",
                content);
        //embedded table
        //TODO: figure out how to handle embedded tables in html
        assertContains("<td>Embedded table r1c1", content);
        //shape
        assertContainsCount("<p>This is text within a shape", content, 1);
        //sdt rich text
        assertContains("<p>Rich text content control", content);
        //sdt simple text
        assertContains("<p>Simple text content control", content);
        //sdt repeating
        assertContains("Repeating content", content);
        //sdt dropdown
        //TODO: get options for dropdown
        assertContains("Drop down1", content);
        //sdt date
        assertContains("<p>11/16/2016</p>", content);
        //test that <tab/> works
        assertContains("tab\ttab", content);
        assertContainsCount("serious word art", content, 1);
        assertContainsCount("Wordartr1c1", content, 1);
        //glossary document contents
        assertContains("Click or tap to enter a date", content);
        //basic b/i tags...make sure not to overlap!
        assertContains("<p>The <i>quick</i> brown <b>fox </b>j<i>um</i><b><i>ped</i></b> over",
                content);
        assertContains("This is a comment", content);
        assertContains("This is an endnote", content);
        assertContains("this is the footnote", content);
        // all six header/footer variants (first/even/odd) must be extracted
        assertContains("First page header", content);
        assertContains("Even page header", content);
        assertContains("Odd page header", content);
        assertContains("First page footer", content);
        assertContains("Even page footer", content);
        assertContains("Odd page footer", content);
        //test default does not include deleted ("frog" is deleted text, "Mattmann" is an insertion)
        assertNotContained("frog", content);
        assertContains("Mattmann", content);
        //TODO: extract chart text
        // assertContains("This is the chart title", content);
        //TODO: add chart parsing
        // assertContains("This is the chart", content);
    }
/**
* Test the plain text output of the Word converter
*
* @throws Exception
*/
@Test
public void testWord() throws Exception {
XMLResult xmlResult = getXML("testWORD.docx", parseContext);
assertEquals("application/vnd.openxmlformats-officedocument.wordprocessingml.document",
xmlResult.metadata.get(Metadata.CONTENT_TYPE));
assertEquals("Sample Word Document", xmlResult.metadata.get(TikaCoreProperties.TITLE));
assertEquals("Keith Bennett", xmlResult.metadata.get(TikaCoreProperties.CREATOR));
assertTrue(xmlResult.xml.contains("Sample Word Document"));
}
/**
* Test the plain text output of the Word converter
*
* @throws Exception
*/
@Test
public void testWordFootnote() throws Exception {
XMLResult xmlResult = getXML("footnotes.docx", parseContext);
assertEquals("application/vnd.openxmlformats-officedocument.wordprocessingml.document",
xmlResult.metadata.get(Metadata.CONTENT_TYPE));
assertTrue(xmlResult.xml.contains("snoska"));
}
    /**
     * Test that the word converter is able to generate the
     * correct HTML for the document
     *
     * @throws Exception on any parse failure
     */
    @Test
    public void testWordHTML() throws Exception {
        XMLResult result = getXML("testWORD.docx", parseContext);
        String xml = result.xml;
        Metadata metadata = result.metadata;
        // basic metadata sanity checks on the fixture
        assertEquals("application/vnd.openxmlformats-officedocument.wordprocessingml.document",
                metadata.get(Metadata.CONTENT_TYPE));
        assertEquals("Sample Word Document", metadata.get(TikaCoreProperties.TITLE));
        assertEquals("Keith Bennett", metadata.get(TikaCoreProperties.CREATOR));
        assertTrue(xml.contains("Sample Word Document"));
        // Check that custom headings came through
        assertTrue(xml.contains("<h1 class=\"title\">"));
        // Regular headings
        assertContains("<h1>Heading Level 1</h1>", xml);
        assertTrue(xml.contains("<h2>Heading Level 2</h2>"));
        // Headings with anchor tags in them
        //TODO: still not getting bookmarks
        assertTrue(xml.contains("<h3>Heading Level 3<a name=\"OnLevel3\" /></h3>"));
        //        assertTrue(xml.contains("<h3>Heading Level 3</h3>"));
        // Bold and italic
        assertTrue(xml.contains("<b>BOLD</b>"));
        assertTrue(xml.contains("<i>ITALIC</i>"));
        // Table
        assertTrue(xml.contains("<table>"));
        assertTrue(xml.contains("<td>"));
        // Links
        assertTrue(xml.contains("<a href=\"http://tika.apache.org/\">Tika</a>"));
        // Anchor links
        assertContains("<a href=\"#OnMainHeading\">The Main Heading Bookmark</a>", xml);
        // Paragraphs with other styles
        assertTrue(xml.contains("<p class=\"signature\">This one"));
        // second fixture: document with three embedded images
        result = getXML("testWORD_3imgs.docx", parseContext);
        xml = result.xml;
        // Images 2-4 (there is no 1!)
        assertTrue(xml.contains("<img src=\"embedded:image2.png\" alt=\"A description...\" />"),
                "Image not found in:\n" + xml);
        assertTrue(xml.contains("<img src=\"embedded:image3.jpeg\" alt=\"A description...\" />"),
                "Image not found in:\n" + xml);
        assertTrue(xml.contains("<img src=\"embedded:image4.png\" alt=\"A description...\" />"),
                "Image not found in:\n" + xml);
        // Text too
        assertTrue(xml.contains("<p>The end!</p>"));
    }
    /**
     * TIKA-692: adjacent bold character runs must be merged into one
     * contiguous bold element rather than emitted as separate tags.
     */
    @Test
    public void testContiguousHTMLFormatting() throws Exception {
        // TIKA-692: test document containing multiple
        // character runs within a bold tag:
        String xml = getXML("testWORD_bold_character_runs.docx", parseContext).xml;
        // Make sure bold text arrived as single
        // contiguous string even though Word parser
        // handled this as 3 character runs
        assertTrue(xml.contains("F<b>oob</b>a<b>r</b>"), "Bold text wasn't contiguous: " + xml);
        // TIKA-692: test document containing multiple
        // character runs within a bold tag:
        xml = getXML("testWORD_bold_character_runs2.docx", parseContext).xml;
        // Make sure bold text arrived as single
        // contiguous string even though Word parser
        // handled this as 3 character runs
        assertTrue(xml.contains("F<b>oob</b>a<b>r</b>"), "Bold text wasn't contiguous: " + xml);
    }
/**
* Test that we can extract image from docx header
*/
@Test
public void testWordPicturesInHeader() throws Exception {
List<Metadata> metadataList = getRecursiveMetadata("headerPic.docx", parseContext);
assertEquals(2, metadataList.size());
Metadata m = metadataList.get(0);
String mainContent = m.get(TikaCoreProperties.TIKA_CONTENT);
assertEquals("application/vnd.openxmlformats-officedocument.wordprocessingml.document",
m.get(Metadata.CONTENT_TYPE));
// Check that custom headings came through
assertTrue(mainContent.contains("<img"));
}
    /**
     * Images embedded in headers, footers, body, SDTs, footnotes, endnotes,
     * and comments must all be extracted, and each embedded resource must be
     * processed only once.
     */
    @Test
    public void testPicturesInVariousPlaces() throws Exception {
        //test that images are actually extracted from
        //headers, footers, comments, endnotes, footnotes
        List<Metadata> metadataList =
                getRecursiveMetadata("testWORD_embedded_pics.docx", parseContext);
        //only process embedded resources once
        assertEquals(3, metadataList.size());
        String content = metadataList.get(0).get(TikaCoreProperties.TIKA_CONTENT);
        // one picture per numbered header and footer (1..3)
        for (int i = 1; i < 4; i++) {
            assertContains("header" + i + "_pic", content);
            assertContains("footer" + i + "_pic", content);
        }
        assertContains("body_pic.jpg", content);
        assertContains("sdt_pic.jpg", content);
        assertContains("deeply_embedded_pic", content);
        assertContains("deleted_pic", content);//TODO: don't extract this
        assertContains("footnotes_pic", content);
        assertContains("comments_pic", content);
        assertContains("endnotes_pic", content);
        //        assertContains("sdt2_pic.jpg", content);//name of file is not stored in image-sdt
        assertContainsCount("<img src=", content, 14);
    }
/**
* Test docx without headers
* TIKA-633
*/
@Test
public void testNullHeaders() throws Exception {
XMLResult xmlResult = getXML("NullHeader.docx", parseContext);
assertEquals(false, xmlResult.xml.isEmpty(), "Should have found some text");
}
    /**
     * Broad smoke test over testWORD_various.docx: footnotes, headers and
     * footers, text boxes, formatting runs, citations/captions, tables,
     * bulleted and numbered lists, keyword/subject metadata, and non-Latin
     * (Japanese and Gothic) text.
     */
    @Test
    public void testVarious() throws Exception {
        Metadata metadata = new Metadata();
        String content = getText("testWORD_various.docx", metadata, parseContext);
        //content = content.replaceAll("\\s+"," ");
        assertContains("Footnote appears here", content);
        assertContains("This is a footnote.", content);
        assertContains("This is the header text.", content);
        assertContains("This is the footer text.", content);
        assertContains("Here is a text box", content);
        assertContains("Bold", content);
        assertContains("italic", content);
        assertContains("underline", content);
        assertContains("superscript", content);
        assertContains("subscript", content);
        assertContains("Here is a citation:", content);
        assertContains("Figure 1 This is a caption for Figure 1", content);
        assertContains("(Kramer)", content);
        // table contents in row-major order (whitespace collapsed)
        assertContains("Row 1 Col 1 Row 1 Col 2 Row 1 Col 3 Row 2 Col 1 Row 2 Col 2 Row 2 Col 3",
                content.replaceAll("\\s+", " "));
        assertContains("Row 1 column 1 Row 2 column 1 Row 1 column 2 Row 2 column 2",
                content.replaceAll("\\s+", " "));
        assertContains("This is a hyperlink", content);
        assertContains("Here is a list:", content);
        for (int row = 1; row <= 3; row++) {
            //assertContains("·\tBullet " + row, content);
            //assertContains("\u00b7\tBullet " + row, content);
            assertContains("Bullet " + row, content);
        }
        assertContains("Here is a numbered list:", content);
        for (int row = 1; row <= 3; row++) {
            //assertContains(row + ")\tNumber bullet " + row, content);
            //assertContains(row + ") Number bullet " + row, content);
            // TODO: OOXMLExtractor fails to number the bullets:
            assertContains("Number bullet " + row, content);
        }
        for (int row = 1; row <= 2; row++) {
            for (int col = 1; col <= 3; col++) {
                assertContains("Row " + row + " Col " + col, content);
            }
        }
        assertContains("Keyword1 Keyword2", content);
        assertEquals("Keyword1 Keyword2", metadata.get(Office.KEYWORDS));
        assertContains("Subject is here", content);
        assertEquals("Subject is here", metadata.get(DublinCore.SUBJECT));
        assertContains("Suddenly some Japanese text:", content);
        // Special version of (GHQ)
        assertContains("\uff08\uff27\uff28\uff31\uff09", content);
        // 6 other characters
        assertContains("\u30be\u30eb\u30b2\u3068\u5c3e\u5d0e\u3001\u6de1\u3005\u3068\u6700\u671f",
                content);
        assertContains("And then some Gothic text:", content);
        // supplementary-plane (surrogate pair) characters
        assertContains("\uD800\uDF32\uD800\uDF3f\uD800\uDF44\uD800\uDF39\uD800\uDF43\uD800\uDF3A",
                content);
    }
@Test
public void testWordCustomProperties() throws Exception {
ParseContext context = new ParseContext();
context.set(Locale.class, Locale.US);
Metadata metadata = getXML("testWORD_custom_props.docx", parseContext).metadata;
assertEquals("application/vnd.openxmlformats-officedocument.wordprocessingml.document",
metadata.get(Metadata.CONTENT_TYPE));
assertEquals("EJ04325S", metadata.get(TikaCoreProperties.CREATOR));
assertEquals("Etienne Jouvin", metadata.get(TikaCoreProperties.MODIFIER));
assertEquals("2011-07-29T16:52:00Z", metadata.get(TikaCoreProperties.CREATED));
assertEquals("2012-01-03T22:14:00Z", metadata.get(TikaCoreProperties.MODIFIED));
assertEquals("Microsoft Office Word", metadata.get(OfficeOpenXMLExtended.APPLICATION));
assertEquals("1", metadata.get(Office.PAGE_COUNT));
assertEquals("2", metadata.get(Office.WORD_COUNT));
assertEquals("My Title", metadata.get(TikaCoreProperties.TITLE));
assertEquals("My Keyword", metadata.get(Office.KEYWORDS));
assertContains("My Keyword", Arrays.asList(metadata.getValues(TikaCoreProperties.SUBJECT)));
assertEquals("Normal.dotm", metadata.get(OfficeOpenXMLExtended.TEMPLATE));
assertEquals("My subject", metadata.get(DublinCore.SUBJECT));
assertEquals("EDF-DIT", metadata.get(TikaCoreProperties.PUBLISHER));
assertEquals("true", metadata.get("custom:myCustomBoolean"));
assertEquals("3", metadata.get("custom:myCustomNumber"));
assertEquals("MyStringValue", metadata.get("custom:MyCustomString"));
assertEquals("2010-12-30T23:00:00Z", metadata.get("custom:MyCustomDate"));
assertEquals("2010-12-29T22:00:00Z", metadata.get("custom:myCustomSecondDate"));
}
// TIKA-989:
@Test
public void testEmbeddedPDF() throws Exception {
String xml = getXML("testWORD_embedded_pdf.docx", parseContext).xml;
int i = xml.indexOf("Here is the pdf file:");
int j = xml.indexOf("<div class=\"embedded\" id=\"rId5\" />");
int k = xml.indexOf("Bye Bye");
int l = xml.indexOf("<div class=\"embedded\" id=\"rId6\" />");
int m = xml.indexOf("Bye for real.");
assertTrue(i != -1);
assertTrue(j != -1);
assertTrue(k != -1);
assertTrue(l != -1);
assertTrue(m != -1);
assertTrue(i < j);
assertTrue(j < k);
assertTrue(k < l);
assertTrue(l < m);
}
// TIKA-1006
@Test
public void testWordNullStyle() throws Exception {
String xml = getXML("testWORD_null_style.docx").xml;
assertContains("Test av styrt dokument", xml);
}
/**
* TIKA-1044 - Handle word documents where parts of the
* text have no formatting or styles applied to them
*/
@Test
public void testNoFormat() throws Exception {
assertContains("This is a piece of text that causes an exception",
getXML("testWORD_no_format.docx", parseContext).xml);
}
@Test
public void testSkipDeleted() throws Exception {
ParseContext pc = new ParseContext();
OfficeParserConfig officeParserConfig = new OfficeParserConfig();
officeParserConfig.setIncludeDeletedContent(true);
officeParserConfig.setUseSAXDocxExtractor(true);
officeParserConfig.setIncludeMoveFromContent(true);
pc.set(OfficeParserConfig.class, officeParserConfig);
XMLResult r = getXML("testWORD_2006ml.docx", pc);
assertContains("frog", r.xml);
assertContainsCount("Second paragraph", r.xml, 2);
}
    // TIKA-1005:
    /**
     * Text inside text boxes placed in the body, header, and footer must be
     * extracted along with the regular body text.
     */
    @Test
    public void testTextInsideTextBox() throws Exception {
        String xml = getXML("testWORD_text_box.docx", parseContext).xml;
        assertContains("This text is directly in the body of the document.", xml);
        assertContains("This text is inside of a text box in the body of the document.", xml);
        assertContains("This text is inside of a text box in the header of the document.", xml);
        assertContains("This text is inside of a text box in the footer of the document.", xml);
    }
//TIKA-2346
@Test
public void testTurningOffTextBoxExtraction() throws Exception {
ParseContext pc = new ParseContext();
OfficeParserConfig officeParserConfig = new OfficeParserConfig();
officeParserConfig.setIncludeShapeBasedContent(false);
officeParserConfig.setUseSAXDocxExtractor(true);
pc.set(OfficeParserConfig.class, officeParserConfig);
String xml = getXML("testWORD_text_box.docx", pc).xml;
assertContains("This text is directly in the body of the document.", xml);
assertNotContained("This text is inside of a text box in the body of the document.", xml);
assertNotContained("This text is inside of a text box in the header of the document.", xml);
assertNotContained("This text is inside of a text box in the footer of the document.", xml);
}
    /**
     * Test for missing text described in
     * <a href="https://issues.apache.org/jira/browse/TIKA-1130">TIKA-1130</a>.
     * and TIKA-1317
     */
    @Test
    public void testMissingText() throws Exception {
        XMLResult xmlResult = getXML("testWORD_missing_text.docx", parseContext);
        assertEquals("application/vnd.openxmlformats-officedocument.wordprocessingml.document",
                xmlResult.metadata.get(Metadata.CONTENT_TYPE));
        // all three strings must survive extraction
        assertContains("BigCompany", xmlResult.xml);
        assertContains("Seasoned", xmlResult.xml);
        assertContains("Rich_text_in_cell", xmlResult.xml);
    }
//TIKA-792; with room for future missing bean tests
@Test
public void testWordMissingOOXMLBeans() throws Exception {
//If a bean is missing, POI prints stack trace to stderr
String[] fileNames = new String[]{"testWORD_missing_ooxml_bean1.docx",//TIKA-792
};
PrintStream origErr = System.err;
for (String fileName : fileNames) {
//grab stderr
ByteArrayOutputStream errContent = new ByteArrayOutputStream();
System.setErr(new PrintStream(errContent, true, UTF_8.name()));
getXML(fileName, parseContext);
//return stderr
System.setErr(origErr);
String err = errContent.toString(UTF_8.name());
assertTrue(err.isEmpty(), "expected no error msg, but got >" +
err + "<");
}
}
@Test
public void testDOCXThumbnail() throws Exception {
String xml = getXML("testDOCX_Thumbnail.docx", parseContext).xml;
int a = xml.indexOf("This file contains a thumbnail");
int b = xml.indexOf("<div class=\"embedded\" id=\"/docProps/thumbnail.emf\" />");
assertTrue(a != -1);
assertTrue(b != -1);
assertTrue(a < b);
}
@Test
public void testEncrypted() throws Exception {
Map<String, String> tests = new HashMap<>();
tests.put("testWORD_protected_passtika.docx", "This is an encrypted Word 2007 File");
Metadata m = new Metadata();
PasswordProvider passwordProvider = metadata -> "tika";
OfficeParserConfig opc = new OfficeParserConfig();
opc.setUseSAXDocxExtractor(true);
ParseContext passwordContext = new ParseContext();
passwordContext.set(org.apache.tika.parser.PasswordProvider.class, passwordProvider);
passwordContext.set(OfficeParserConfig.class, opc);
for (Map.Entry<String, String> e : tests.entrySet()) {
assertContains(e.getValue(), getXML(e.getKey(), passwordContext).xml);
}
//now try with no password
for (Map.Entry<String, String> e : tests.entrySet()) {
boolean exc = false;
try {
getXML(e.getKey(), parseContext);
} catch (EncryptedDocumentException ex) {
exc = true;
}
assertTrue(exc);
}
}
    /**
     * Autonumbering of list paragraphs: numbered, lettered, and roman
     * prefixes must be generated for multi-level lists, including lists
     * inside table cells and lists continuing across a page break.
     */
    @Test
    public void testDOCXParagraphNumbering() throws Exception {
        String xml = getXML("testWORD_numbered_list.docx", parseContext).xml;
        //SAX parser is getting this. DOM parser is not!
        assertContains("add a list here", xml);
        assertContains("1) This", xml);
        assertContains("a) Is", xml);
        assertContains("i) A multi", xml);
        assertContains("ii) Level", xml);
        assertContains("1. Within cell 1", xml);
        assertContains("b. Cell b", xml);
        assertContains("iii) List", xml);
        assertContains("2) foo", xml);
        assertContains("ii) baz", xml);
        assertContains("ii) foo", xml);
        assertContains("II. bar", xml);
        assertContains("6. six", xml);
        assertContains("7. seven", xml);
        assertContains("a. seven a", xml);
        assertContains("e. seven e", xml);
        assertContains("2. A ii 2", xml);
        assertContains("3. page break list 3", xml);
        assertContains("Some-1-CrazyFormat Greek numbering with crazy format - alpha", xml);
        assertContains("1.1.1. 1.1.1", xml);
        assertContains("1.1. 1.2->1.1 //set the value", xml);
    }
    /**
     * List-numbering overrides: restarts, skipped levels, custom start-at
     * values, "isLegal" decimal conversion, and per-level format overrides
     * must all produce the expected rendered prefixes.
     */
    @Test
    public void testDOCXOverrideParagraphNumbering() throws Exception {
        String xml = getXML("testWORD_override_list_numbering.docx").xml;
        //Test 1
        assertContains("<p>1.1.1.1...1 1.1.1.1...1</p>", xml);
        assertContains("1st.2.3someText 1st.2.3someText", xml);
        assertContains("1st.2.2someOtherText.1 1st.2.2someOtherText.1", xml);
        assertContains("5th 5th", xml);
        //Test 2
        assertContains("1.a.I 1.a.I", xml);
        //test no reset because level 2 is not sufficient to reset
        assertContains("<p>1.b.III 1.b.III</p>", xml);
        //test restarted because of level 0's increment to 2
        assertContains("2.a.I 2.a.I", xml);
        //test handling of skipped level
        assertContains("<p>2.b 2.b</p>", xml);
        //Test 3
        assertContains("(1)) (1))", xml);
        //tests start level 1 at 17 and
        assertContains("2.17 2.17", xml);
        //tests that isLegal turns everything into decimal
        assertContains("2.18.2.1 2.18.2.1", xml);
        assertContains("<p>2 2</p>", xml);
        //Test4
        assertContains("<p>1 1</p>", xml);
        assertContains("<p>A A</p>", xml);
        assertContains("<p>B B</p>", xml);
        //this tests overrides
        assertContains("<p>C C</p>", xml);
        assertContains("<p>4 4</p>", xml);
        //Test5
        assertContains(">00 00", xml);
        assertContains(">01 01", xml);
        assertContains(">01. 01.", xml);
        assertContains(">01..1 01..1", xml);
        assertContains(">02 02", xml);
    }
@Test
public void testMultiAuthorsManagers() throws Exception {
XMLResult r = getXML("testWORD_multi_authors.docx", parseContext);
String[] authors = r.metadata.getValues(TikaCoreProperties.CREATOR);
assertEquals(3, authors.length);
assertEquals("author2", authors[1]);
String[] managers = r.metadata.getValues(OfficeOpenXMLExtended.MANAGER);
assertEquals(2, managers.length);
assertEquals("manager1", managers[0]);
assertEquals("manager2", managers[1]);
}
    /**
     * The original on-disk paths recorded for an embedded zip must both be
     * exposed via the multi-valued ORIGINAL_RESOURCE_NAME metadata key.
     */
    @Test
    public void testOrigSourcePath() throws Exception {
        // index 2: the embedded zip's metadata within the recursive output
        Metadata embed1_zip_metadata =
                getRecursiveMetadata("test_recursive_embedded.docx", parseContext).get(2);
        assertContains("C:\\Users\\tallison\\AppData\\Local\\Temp\\embed1.zip", Arrays.asList(
                embed1_zip_metadata.getValues(TikaCoreProperties.ORIGINAL_RESOURCE_NAME)));
        assertContains("C:\\Users\\tallison\\Desktop\\tmp\\New folder (2)\\embed1.zip",
                Arrays.asList(
                        embed1_zip_metadata.getValues(TikaCoreProperties.ORIGINAL_RESOURCE_NAME)));
    }
@Test
public void testBoldHyperlink() throws Exception {
//TIKA-1255
String xml = getXML("testWORD_boldHyperlink.docx", parseContext).xml;
xml = xml.replaceAll("\\s+", " ");
assertContains("<a href=\"http://tika.apache.org/\">hyper <b>link</b></a>", xml);
assertContains("<a href=\"http://tika.apache.org/\"><b>hyper</b> link</a>; bold", xml);
}
@Test
public void testLongForIntExceptionInSummaryDetails() throws Exception {
//TIKA-2055
assertContains("bold", getXML("testWORD_totalTimeOutOfRange.docx", parseContext).xml);
}
    /**
     * VBA macro handling in .docm files: macros must not be extracted by
     * default, must be extracted as text/x-vbasic MACRO resources when
     * setExtractMacros(true), and the same behavior must be reachable via a
     * tika-config XML file.
     */
    @Test
    public void testMacrosInDocm() throws Exception {
        Metadata parsedBy = new Metadata();
        parsedBy.add(TikaCoreProperties.TIKA_PARSED_BY,
                "org.apache.tika.parser.microsoft.ooxml.xwpf.XWPFEventBasedWordExtractor");
        //test default is "don't extract macros"
        List<Metadata> metadataList = getRecursiveMetadata("testWORD_macros.docm", parseContext);
        for (Metadata metadata : metadataList) {
            if (metadata.get(Metadata.CONTENT_TYPE).equals("text/x-vbasic")) {
                fail("Shouldn't have extracted macros as default");
            }
        }
        assertContainsAtLeast(parsedBy, metadataList);
        //now test that they were extracted
        ParseContext context = new ParseContext();
        OfficeParserConfig officeParserConfig = new OfficeParserConfig();
        officeParserConfig.setExtractMacros(true);
        officeParserConfig.setUseSAXDocxExtractor(true);
        context.set(OfficeParserConfig.class, officeParserConfig);
        metadataList = getRecursiveMetadata("testWORD_macros.docm", context);
        //check that content came out of the .docm file
        assertContains("quick", metadataList.get(0).get(TikaCoreProperties.TIKA_CONTENT));
        assertContainsAtLeast(parsedBy, metadataList);
        // the extracted macros must include these subs, typed and flagged
        Metadata minExpected = new Metadata();
        minExpected.add(TikaCoreProperties.TIKA_CONTENT.getName(), "Sub Embolden()");
        minExpected.add(TikaCoreProperties.TIKA_CONTENT.getName(), "Sub Italicize()");
        minExpected.add(Metadata.CONTENT_TYPE, "text/x-vbasic");
        minExpected.add(TikaCoreProperties.EMBEDDED_RESOURCE_TYPE,
                TikaCoreProperties.EmbeddedResourceType.MACRO.toString());
        assertContainsAtLeast(minExpected, metadataList);
        assertContainsAtLeast(parsedBy, metadataList);
        //test configuring via config file
        try (InputStream is = getResourceAsStream("tika-config-sax-macros.xml")) {
            TikaConfig tikaConfig = new TikaConfig(is);
            AutoDetectParser parser = new AutoDetectParser(tikaConfig);
            metadataList = getRecursiveMetadata("testWORD_macros.docm", parser);
            assertContainsAtLeast(minExpected, metadataList);
            assertContainsAtLeast(parsedBy, metadataList);
        }
    }
@Test
public void testEmbedded() throws Exception {
List<Metadata> metadataList = getRecursiveMetadata("testWORD_embeded.docx", parseContext);
Metadata main = metadataList.get(0);
String content = main.get(TikaCoreProperties.TIKA_CONTENT);
//make sure mark up is there
assertContains("<img src=\"embedded:image2.jpeg\" alt=\"A description...\" />", content);
assertContains("<div class=\"embedded\" id=\"rId8\" />", content);
assertEquals(16, metadataList.size());
}
    /**
     * Template-based documents: body text and glossary-document content must
     * be extracted from both .docx and .dotx files.
     */
    @Test
    public void testDotx() throws Exception {
        List<Metadata> metadataList = getRecursiveMetadata("testWORD_template.docx", parseContext);
        String content = metadataList.get(0).get(TikaCoreProperties.TIKA_CONTENT);
        assertContains("Metallica", content);
        assertContains("Hetfield", content);
        assertContains("one eye open", content);
        assertContains("Getting the perfect", content);
        //from glossary document
        assertContains("table rows", content);
        metadataList = getRecursiveMetadata("testWORD_template.dotx", parseContext);
        content = metadataList.get(0).get(TikaCoreProperties.TIKA_CONTENT);
        //from glossary document
        assertContainsCount("ready to write", content, 2);
    }
@Test
public void testDiagramData() throws Exception {
assertContains("From here", getXML("testWORD_diagramData.docx", parseContext).xml);
}
    /**
     * Chart text (e.g. "peach", "March\tApril") must be extracted, and raw
     * chartSpace markup must not leak into the output.
     */
    @Test
    public void testDOCXChartData() throws Exception {
        String xml = getXML("testWORD_charts.docx", parseContext).xml;
        assertContains("peach", xml);
        assertContains("March\tApril", xml);
        assertNotContained("chartSpace", xml);
    }
@Test
public void testHeaderFooterNotExtraction() throws Exception {
ParseContext parseContext = new ParseContext();
OfficeParserConfig officeParserConfig = new OfficeParserConfig();
officeParserConfig.setIncludeHeadersAndFooters(false);
officeParserConfig.setUseSAXDocxExtractor(true);
parseContext.set(OfficeParserConfig.class, officeParserConfig);
String xml = getXML("testWORD_various.docx", parseContext).xml;
assertNotContained("This is the header text.", xml);
assertNotContained("This is the footer text.", xml);
}
@Test
public void testDOCXPhoneticStrings() throws Exception {
OfficeParserConfig config = new OfficeParserConfig();
config.setUseSAXDocxExtractor(true);
ParseContext parseContext = new ParseContext();
parseContext.set(OfficeParserConfig.class, config);
assertContains("\u6771\u4EAC (\u3068\u3046\u304D\u3087\u3046)",
getXML("testWORD_phonetic.docx", parseContext).xml);
config.setConcatenatePhoneticRuns(false);
String xml = getXML("testWORD_phonetic.docx", parseContext).xml;
assertContains("\u6771\u4EAC", xml);
assertNotContained("\u3068", xml);
}
    /**
     * Inline text decoration: bold, italic, underline, and strikethrough
     * runs must be wrapped in the corresponding HTML tags.
     */
    @Test
    public void testTextDecoration() throws Exception {
        String xml = getXML("testWORD_various.docx", parseContext).xml;
        assertContains("<b>Bold</b>", xml);
        assertContains("<i>italic</i>", xml);
        assertContains("<u>underline</u>", xml);
        assertContains("<strike>strikethrough</strike>", xml);
    }
    /**
     * Nested and overlapping decorations must produce properly nested tags,
     * and closing/opening strike and underline tags must not introduce
     * spurious spaces into the plain-text output.
     */
    @Test
    public void testTextDecorationNested() throws Exception {
        String xml = getXML("testWORD_various.docx", parseContext).xml;
        assertContains("<i>ita<strike>li</strike>c</i>", xml);
        assertContains("<i>ita<strike>l<u>i</u></strike>c</i>", xml);
        assertContains("<i><u>unde</u><strike><u>r</u></strike><u>line</u></i>", xml);
        //confirm that spaces aren't added for <strike/> and <u/>
        String txt = getText("testWORD_various.docx", new Metadata(), parseContext);
        assertContainsCount("italic", txt, 3);
        assertNotContained("ita ", txt);
        assertContainsCount("underline", txt, 2);
        assertNotContained("unde ", txt);
    }
    //TIKA-2807
    /**
     * A structured document tag (SDT) nested inside a text box must be
     * extracted exactly once.
     */
    @Test
    public void testSDTInTextBox() throws Exception {
        String xml = getXML("testWORD_sdtInTextBox.docx", parseContext).xml;
        assertContains("rich-text-content-control_inside-text-box", xml);
        assertContainsCount("inside-text", xml, 1);
    }
}
|
googleapis/google-cloud-java | 36,028 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListDatasetsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/dataset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [DatasetService.ListDatasets][google.cloud.aiplatform.v1.DatasetService.ListDatasets].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListDatasetsResponse}
*/
public final class ListDatasetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListDatasetsResponse)
ListDatasetsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListDatasetsResponse.newBuilder() to construct.
  private ListDatasetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: initializes the repeated/string fields to their
  // proto3 defaults (empty list / empty string).
  private ListDatasetsResponse() {
    datasets_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDatasetsResponse();
  }
  // Returns the protobuf message descriptor for this type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.DatasetServiceProto
        .internal_static_google_cloud_aiplatform_v1_ListDatasetsResponse_descriptor;
  }
  // Wires reflective field access to the generated descriptor table.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.DatasetServiceProto
        .internal_static_google_cloud_aiplatform_v1_ListDatasetsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.ListDatasetsResponse.class,
            com.google.cloud.aiplatform.v1.ListDatasetsResponse.Builder.class);
  }
  public static final int DATASETS_FIELD_NUMBER = 1;
  // Backing list for the repeated "datasets" field; never null (initialized
  // to an empty list in the no-arg constructor).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.aiplatform.v1.Dataset> datasets_;
  /**
   *
   *
   * <pre>
   * A list of Datasets that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.aiplatform.v1.Dataset> getDatasetsList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * A list of Datasets that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.aiplatform.v1.DatasetOrBuilder>
      getDatasetsOrBuilderList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * A list of Datasets that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public int getDatasetsCount() {
    return datasets_.size();
  }
  /**
   *
   *
   * <pre>
   * A list of Datasets that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.Dataset getDatasets(int index) {
    return datasets_.get(index);
  }
  /**
   *
   *
   * <pre>
   * A list of Datasets that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
    return datasets_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a java.lang.String or a ByteString; lazily converted and
  // cached in whichever form is requested (standard generated-code pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * The standard List next-page token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // first String access with a ByteString backing: decode UTF-8 once
      // and cache the result
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The standard List next-page token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // first bytes access with a String backing: encode UTF-8 once and
      // cache the result
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // Proto3 messages have no required fields, so this always resolves to true
  // (and caches that answer).
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message: each dataset as field 1, next_page_token as
  // field 2 (omitted when empty), then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < datasets_.size(); i++) {
      output.writeMessage(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size, mirroring the field order written by
  // writeTo; the result is memoized in memoizedSize (-1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < datasets_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over the datasets list, next_page_token, and unknown
  // fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ListDatasetsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ListDatasetsResponse other =
        (com.google.cloud.aiplatform.v1.ListDatasetsResponse) obj;
    if (!getDatasetsList().equals(other.getDatasetsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(); memoized in memoizedHashCode
  // (0 = not yet computed). The repeated datasets field only contributes
  // when non-empty.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDatasetsCount() > 0) {
      hash = (37 * hash) + DATASETS_FIELD_NUMBER;
      hash = (53 * hash) + getDatasetsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. Byte-source overloads delegate to
  // PARSER directly; stream overloads go through the GeneratedMessageV3 IO
  // helpers, which convert IOExceptions appropriately. parseDelimitedFrom
  // reads a varint length prefix before the message payload.
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder() starts from the shared default instance;
  // newBuilder(prototype) and toBuilder() pre-populate the Builder with an
  // existing message's fields via mergeFrom. The BuilderParent overload wires
  // the builder into a parent message for nested-change propagation.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListDatasetsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for
   * [DatasetService.ListDatasets][google.cloud.aiplatform.v1.DatasetService.ListDatasets].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.ListDatasetsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListDatasetsResponse)
      com.google.cloud.aiplatform.v1.ListDatasetsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.DatasetServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.DatasetServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListDatasetsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.ListDatasetsResponse.class,
              com.google.cloud.aiplatform.v1.ListDatasetsResponse.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1.ListDatasetsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both fields to their defaults and clears the has-bits
    // (bit 0x1 = datasets list is mutable, bit 0x2 = next_page_token set).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (datasetsBuilder_ == null) {
        datasets_ = java.util.Collections.emptyList();
      } else {
        datasets_ = null;
        datasetsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.DatasetServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListDatasetsResponse getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.ListDatasetsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListDatasetsResponse build() {
      com.google.cloud.aiplatform.v1.ListDatasetsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListDatasetsResponse buildPartial() {
      com.google.cloud.aiplatform.v1.ListDatasetsResponse result =
          new com.google.cloud.aiplatform.v1.ListDatasetsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated datasets field into the result: when no field
    // builder exists the builder-owned list is frozen (made unmodifiable) and
    // handed over; otherwise the field builder materializes the list.
    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1.ListDatasetsResponse result) {
      if (datasetsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          datasets_ = java.util.Collections.unmodifiableList(datasets_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.datasets_ = datasets_;
      } else {
        result.datasets_ = datasetsBuilder_.build();
      }
    }
    // Copies singular fields whose has-bit is set into the result.
    private void buildPartial0(com.google.cloud.aiplatform.v1.ListDatasetsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.ListDatasetsResponse) {
        return mergeFrom((com.google.cloud.aiplatform.v1.ListDatasetsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: other's datasets are appended to this builder's
    // list (adopting other's immutable list directly when ours is empty);
    // a non-empty next_page_token from other overwrites ours.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListDatasetsResponse other) {
      if (other == com.google.cloud.aiplatform.v1.ListDatasetsResponse.getDefaultInstance())
        return this;
      if (datasetsBuilder_ == null) {
        if (!other.datasets_.isEmpty()) {
          if (datasets_.isEmpty()) {
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDatasetsIsMutable();
            datasets_.addAll(other.datasets_);
          }
          onChanged();
        }
      } else {
        if (!other.datasets_.isEmpty()) {
          if (datasetsBuilder_.isEmpty()) {
            datasetsBuilder_.dispose();
            datasetsBuilder_ = null;
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
            datasetsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDatasetsFieldBuilder()
                    : null;
          } else {
            datasetsBuilder_.addAllMessages(other.datasets_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse loop: tag 10 = datasets sub-message (field 1,
    // length-delimited), tag 18 = next_page_token UTF-8 string (field 2);
    // anything else is routed to the unknown-field set. Tag 0 ends the stream.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.aiplatform.v1.Dataset m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1.Dataset.parser(), extensionRegistry);
                if (datasetsBuilder_ == null) {
                  ensureDatasetsIsMutable();
                  datasets_.add(m);
                } else {
                  datasetsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.util.List<com.google.cloud.aiplatform.v1.Dataset> datasets_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: bit 0x1 marks datasets_ as a privately-owned
    // mutable ArrayList; until then it may alias an immutable list.
    private void ensureDatasetsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        datasets_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Dataset>(datasets_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Dataset,
            com.google.cloud.aiplatform.v1.Dataset.Builder,
            com.google.cloud.aiplatform.v1.DatasetOrBuilder>
        datasetsBuilder_;
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Dataset> getDatasetsList() {
      if (datasetsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(datasets_);
      } else {
        return datasetsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public int getDatasetsCount() {
      if (datasetsBuilder_ == null) {
        return datasets_.size();
      } else {
        return datasetsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Dataset getDatasets(int index) {
      if (datasetsBuilder_ == null) {
        return datasets_.get(index);
      } else {
        return datasetsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder setDatasets(int index, com.google.cloud.aiplatform.v1.Dataset value) {
      if (datasetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatasetsIsMutable();
        datasets_.set(index, value);
        onChanged();
      } else {
        datasetsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder setDatasets(
        int index, com.google.cloud.aiplatform.v1.Dataset.Builder builderForValue) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.set(index, builderForValue.build());
        onChanged();
      } else {
        datasetsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(com.google.cloud.aiplatform.v1.Dataset value) {
      if (datasetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatasetsIsMutable();
        datasets_.add(value);
        onChanged();
      } else {
        datasetsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(int index, com.google.cloud.aiplatform.v1.Dataset value) {
      if (datasetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatasetsIsMutable();
        datasets_.add(index, value);
        onChanged();
      } else {
        datasetsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(com.google.cloud.aiplatform.v1.Dataset.Builder builderForValue) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.add(builderForValue.build());
        onChanged();
      } else {
        datasetsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder addDatasets(
        int index, com.google.cloud.aiplatform.v1.Dataset.Builder builderForValue) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.add(index, builderForValue.build());
        onChanged();
      } else {
        datasetsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder addAllDatasets(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Dataset> values) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, datasets_);
        onChanged();
      } else {
        datasetsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder clearDatasets() {
      if (datasetsBuilder_ == null) {
        datasets_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        datasetsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public Builder removeDatasets(int index) {
      if (datasetsBuilder_ == null) {
        ensureDatasetsIsMutable();
        datasets_.remove(index);
        onChanged();
      } else {
        datasetsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Dataset.Builder getDatasetsBuilder(int index) {
      return getDatasetsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
      if (datasetsBuilder_ == null) {
        return datasets_.get(index);
      } else {
        return datasetsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1.DatasetOrBuilder>
        getDatasetsOrBuilderList() {
      if (datasetsBuilder_ != null) {
        return datasetsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(datasets_);
      }
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Dataset.Builder addDatasetsBuilder() {
      return getDatasetsFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.Dataset.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Dataset.Builder addDatasetsBuilder(int index) {
      return getDatasetsFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.Dataset.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * A list of Datasets that matches the specified filter in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Dataset datasets = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Dataset.Builder> getDatasetsBuilderList() {
      return getDatasetsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3; once created it owns the
    // repeated field's storage, so the plain datasets_ list is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Dataset,
            com.google.cloud.aiplatform.v1.Dataset.Builder,
            com.google.cloud.aiplatform.v1.DatasetOrBuilder>
        getDatasetsFieldBuilder() {
      if (datasetsBuilder_ == null) {
        datasetsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Dataset,
                com.google.cloud.aiplatform.v1.Dataset.Builder,
                com.google.cloud.aiplatform.v1.DatasetOrBuilder>(
                datasets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        datasets_ = null;
      }
      return datasetsBuilder_;
    }
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * The standard List next-page token.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The standard List next-page token.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The standard List next-page token.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The standard List next-page token.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The standard List next-page token.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListDatasetsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListDatasetsResponse)
  // Shared immutable default (all-fields-empty) instance.
  private static final com.google.cloud.aiplatform.v1.ListDatasetsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListDatasetsResponse();
  }
  public static com.google.cloud.aiplatform.v1.ListDatasetsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton: delegates to Builder.mergeFrom and returns buildPartial,
  // attaching the partially-built message to any parse exception.
  private static final com.google.protobuf.Parser<ListDatasetsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDatasetsResponse>() {
        @java.lang.Override
        public ListDatasetsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDatasetsResponse> parser() {
    return PARSER;
  }
  // Instance-level accessors required by the Message interface; both return
  // the shared singletons above.
  @java.lang.Override
  public com.google.protobuf.Parser<ListDatasetsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ListDatasetsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,013 | java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/MigrationSource.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1beta/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1beta;
/**
*
*
* <pre>
* Subset of the source instance configuration that is available when reading
* the cluster resource.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1beta.MigrationSource}
*/
public final class MigrationSource extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.MigrationSource)
MigrationSourceOrBuilder {
private static final long serialVersionUID = 0L;
  // Use MigrationSource.newBuilder() to construct.
  private MigrationSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes scalar fields to proto3 defaults
  // (empty strings, enum number 0).
  private MigrationSource() {
    hostPort_ = "";
    referenceId_ = "";
    sourceType_ = 0;
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MigrationSource();
  }
  // Descriptor/reflection plumbing: both lookups resolve into the generated
  // ResourcesProto holder for this message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.alloydb.v1beta.ResourcesProto
        .internal_static_google_cloud_alloydb_v1beta_MigrationSource_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.alloydb.v1beta.ResourcesProto
        .internal_static_google_cloud_alloydb_v1beta_MigrationSource_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.alloydb.v1beta.MigrationSource.class,
            com.google.cloud.alloydb.v1beta.MigrationSource.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Denote the type of migration source that created this cluster.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType}
   */
  public enum MigrationSourceType implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Migration source is unknown.
     * </pre>
     *
     * <code>MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0;</code>
     */
    MIGRATION_SOURCE_TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * DMS source means the cluster was created via DMS migration job.
     * </pre>
     *
     * <code>DMS = 1;</code>
     */
    DMS(1),
    // Sentinel for wire values not known to this generated code version;
    // has no valid number or descriptor (accessors below throw for it).
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * Migration source is unknown.
     * </pre>
     *
     * <code>MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int MIGRATION_SOURCE_TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * DMS source means the cluster was created via DMS migration job.
     * </pre>
     *
     * <code>DMS = 1;</code>
     */
    public static final int DMS_VALUE = 1;
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static MigrationSourceType valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or
     *     {@code null} for numbers this generated code does not know.
     */
    public static MigrationSourceType forNumber(int value) {
      switch (value) {
        case 0:
          return MIGRATION_SOURCE_TYPE_UNSPECIFIED;
        case 1:
          return DMS;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>() {
              public MigrationSourceType findValueByNumber(int number) {
                return MigrationSourceType.forNumber(number);
              }
            };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.alloydb.v1beta.MigrationSource.getDescriptor().getEnumTypes().get(0);
    }
    private static final MigrationSourceType[] VALUES = values();
    public static MigrationSourceType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    // The proto wire number for this constant (distinct from ordinal()).
    private final int value;
    private MigrationSourceType(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType)
  }
  public static final int HOST_PORT_FIELD_NUMBER = 1;
  // Stored as String or ByteString; lazily converted to String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object hostPort_ = "";
  /**
   *
   *
   * <pre>
   * Output only. The host and port of the on-premises instance in host:port
   * format
   * </pre>
   *
   * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The hostPort.
   */
  @java.lang.Override
  public java.lang.String getHostPort() {
    java.lang.Object ref = hostPort_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the ByteString and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      hostPort_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. The host and port of the on-premises instance in host:port
   * format
   * </pre>
   *
   * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for hostPort.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getHostPortBytes() {
    java.lang.Object ref = hostPort_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      hostPort_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REFERENCE_ID_FIELD_NUMBER = 2;
  // Stored as String or ByteString; lazily converted to String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object referenceId_ = "";
  /**
   *
   *
   * <pre>
   * Output only. Place holder for the external source identifier(e.g DMS job
   * name) that created the cluster.
   * </pre>
   *
   * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The referenceId.
   */
  @java.lang.Override
  public java.lang.String getReferenceId() {
    java.lang.Object ref = referenceId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      referenceId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. Place holder for the external source identifier(e.g DMS job
   * name) that created the cluster.
   * </pre>
   *
   * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for referenceId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getReferenceIdBytes() {
    java.lang.Object ref = referenceId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      referenceId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SOURCE_TYPE_FIELD_NUMBER = 3;
  // Enum stored as its raw wire number so unknown values survive round-trips.
  private int sourceType_ = 0;
  /**
   *
   *
   * <pre>
   * Output only. Type of migration source.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The enum numeric value on the wire for sourceType.
   */
  @java.lang.Override
  public int getSourceTypeValue() {
    return sourceType_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Type of migration source.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The sourceType, or {@code UNRECOGNIZED} if the stored wire number
   *     does not map to a known constant.
   */
  @java.lang.Override
  public com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType getSourceType() {
    com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType result =
        com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType.forNumber(sourceType_);
    return result == null
        ? com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType.UNRECOGNIZED
        : result;
  }
  // Tri-state memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  /**
   * Reports whether all required fields are set. This message has no required
   * fields (proto3), so the result is always {@code true} after memoization.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output}. Fields at their proto3 default
   * (empty string, enum value 0) are skipped, matching canonical proto3
   * encoding; unknown fields parsed from the wire are re-emitted at the end.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostPort_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, hostPort_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(referenceId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, referenceId_);
    }
    if (sourceType_
        != com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType
            .MIGRATION_SOURCE_TYPE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(3, sourceType_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}) the serialized byte size.
   * Must mirror {@link #writeTo} exactly: the same fields are skipped at their
   * proto3 defaults, and unknown fields are included.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostPort_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, hostPort_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(referenceId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, referenceId_);
    }
    if (sourceType_
        != com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType
            .MIGRATION_SOURCE_TYPE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, sourceType_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality over all three fields plus unknown fields. Enum fields are
   * compared by raw wire value so unrecognized values compare correctly.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.alloydb.v1beta.MigrationSource)) {
      return super.equals(obj);
    }
    com.google.cloud.alloydb.v1beta.MigrationSource other =
        (com.google.cloud.alloydb.v1beta.MigrationSource) obj;

    if (!getHostPort().equals(other.getHostPort())) return false;
    if (!getReferenceId().equals(other.getReferenceId())) return false;
    if (sourceType_ != other.sourceType_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Memoized hash consistent with {@link #equals}: folds the descriptor, each
   * field number/value pair, and the unknown fields. The multiplier constants
   * (19/37/53/29) are fixed by the protobuf codegen contract — do not change.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + HOST_PORT_FIELD_NUMBER;
    hash = (53 * hash) + getHostPort().hashCode();
    hash = (37 * hash) + REFERENCE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getReferenceId().hashCode();
    hash = (37 * hash) + SOURCE_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + sourceType_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parse entry points. All overloads delegate to PARSER; the stream
  // variants wrap IOExceptions via the GeneratedMessageV3 helpers, and the
  // "Delimited" variants read a varint length prefix before the message body.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1beta.MigrationSource parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Returns a fresh builder seeded from the default instance. */
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Creates an empty builder for this message type. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Creates a builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.alloydb.v1beta.MigrationSource prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  /**
   * Converts this message to a builder. The default instance is special-cased
   * to avoid a pointless merge of all-default values.
   */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Subset of the source instance configuration that is available when reading
   * the cluster resource.
   * </pre>
   *
   * Protobuf type {@code google.cloud.alloydb.v1beta.MigrationSource}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.MigrationSource)
      com.google.cloud.alloydb.v1beta.MigrationSourceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.alloydb.v1beta.ResourcesProto
          .internal_static_google_cloud_alloydb_v1beta_MigrationSource_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.alloydb.v1beta.ResourcesProto
          .internal_static_google_cloud_alloydb_v1beta_MigrationSource_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.alloydb.v1beta.MigrationSource.class,
              com.google.cloud.alloydb.v1beta.MigrationSource.Builder.class);
    }

    // Construct using com.google.cloud.alloydb.v1beta.MigrationSource.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets all fields to their proto3 defaults and clears the has-bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      hostPort_ = "";
      referenceId_ = "";
      sourceType_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.alloydb.v1beta.ResourcesProto
          .internal_static_google_cloud_alloydb_v1beta_MigrationSource_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.MigrationSource getDefaultInstanceForType() {
      return com.google.cloud.alloydb.v1beta.MigrationSource.getDefaultInstance();
    }

    /** Builds the message, throwing if any required field is unset (none here). */
    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.MigrationSource build() {
      com.google.cloud.alloydb.v1beta.MigrationSource result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.MigrationSource buildPartial() {
      com.google.cloud.alloydb.v1beta.MigrationSource result =
          new com.google.cloud.alloydb.v1beta.MigrationSource(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bit is set; untouched fields keep the
    // message's defaults.
    private void buildPartial0(com.google.cloud.alloydb.v1beta.MigrationSource result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.hostPort_ = hostPort_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.referenceId_ = referenceId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.sourceType_ = sourceType_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.alloydb.v1beta.MigrationSource) {
        return mergeFrom((com.google.cloud.alloydb.v1beta.MigrationSource) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: only non-default fields of `other` overwrite
    // this builder's values; unknown fields are merged in as well.
    public Builder mergeFrom(com.google.cloud.alloydb.v1beta.MigrationSource other) {
      if (other == com.google.cloud.alloydb.v1beta.MigrationSource.getDefaultInstance())
        return this;
      if (!other.getHostPort().isEmpty()) {
        hostPort_ = other.hostPort_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getReferenceId().isEmpty()) {
        referenceId_ = other.referenceId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.sourceType_ != 0) {
        setSourceTypeValue(other.getSourceTypeValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: dispatches on the wire tag (field_number << 3 | wire_type);
    // tag 0 means end of input, an end-group tag terminates via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                hostPort_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                referenceId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                sourceType_ = input.readEnum();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Has-bits: 0x1 = host_port, 0x2 = reference_id, 0x4 = source_type.
    private int bitField0_;

    private java.lang.Object hostPort_ = "";

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The hostPort.
     */
    public java.lang.String getHostPort() {
      java.lang.Object ref = hostPort_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode and cache, mirroring the message's accessor.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        hostPort_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for hostPort.
     */
    public com.google.protobuf.ByteString getHostPortBytes() {
      java.lang.Object ref = hostPort_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        hostPort_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The hostPort to set.
     * @return This builder for chaining.
     */
    public Builder setHostPort(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      hostPort_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearHostPort() {
      hostPort_ = getDefaultInstance().getHostPort();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for hostPort to set.
     * @return This builder for chaining.
     */
    public Builder setHostPortBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      hostPort_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object referenceId_ = "";

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The referenceId.
     */
    public java.lang.String getReferenceId() {
      java.lang.Object ref = referenceId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        referenceId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for referenceId.
     */
    public com.google.protobuf.ByteString getReferenceIdBytes() {
      java.lang.Object ref = referenceId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        referenceId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The referenceId to set.
     * @return This builder for chaining.
     */
    public Builder setReferenceId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      referenceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearReferenceId() {
      referenceId_ = getDefaultInstance().getReferenceId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for referenceId to set.
     * @return This builder for chaining.
     */
    public Builder setReferenceIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      referenceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private int sourceType_ = 0;

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The enum numeric value on the wire for sourceType.
     */
    @java.lang.Override
    public int getSourceTypeValue() {
      return sourceType_;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for sourceType to set.
     * @return This builder for chaining.
     */
    public Builder setSourceTypeValue(int value) {
      sourceType_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The sourceType.
     */
    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType getSourceType() {
      com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType result =
          com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType.forNumber(
              sourceType_);
      return result == null
          ? com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType.UNRECOGNIZED
          : result;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The sourceType to set.
     * @return This builder for chaining.
     */
    public Builder setSourceType(
        com.google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      sourceType_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSourceType() {
      bitField0_ = (bitField0_ & ~0x00000004);
      sourceType_ = 0;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.MigrationSource)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.MigrationSource)
  // Singleton all-defaults instance; created eagerly at class load.
  private static final com.google.cloud.alloydb.v1beta.MigrationSource DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.MigrationSource();
  }

  /** Returns the shared immutable default (all-fields-default) instance. */
  public static com.google.cloud.alloydb.v1beta.MigrationSource getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: parses via a fresh Builder and always returns
  // buildPartial() so partially-read data is preserved; any failure attaches
  // the partial message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<MigrationSource> PARSER =
      new com.google.protobuf.AbstractParser<MigrationSource>() {
        @java.lang.Override
        public MigrationSource parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Static accessor for the shared parser. */
  public static com.google.protobuf.Parser<MigrationSource> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MigrationSource> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.v1beta.MigrationSource getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,228 | java-beyondcorp-clientgateways/grpc-google-cloud-beyondcorp-clientgateways-v1/src/main/java/com/google/cloud/beyondcorp/clientgateways/v1/ClientGatewaysServiceGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.beyondcorp.clientgateways.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* API Overview:
* The `beyondcorp.googleapis.com` service implements the Google Cloud
* BeyondCorp API.
* Data Model:
* The ClientGatewaysService exposes the following resources:
* * Client Gateways, named as follows:
* `projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}`.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/beyondcorp/clientgateways/v1/client_gateways_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ClientGatewaysServiceGrpc {
  // Utility holder of static descriptors and stub factories; not instantiable.
  private ClientGatewaysServiceGrpc() {}

  public static final java.lang.String SERVICE_NAME =
      "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService";

  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest,
          com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>
      getListClientGatewaysMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListClientGateways",
      requestType = com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest.class,
      responseType = com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest,
          com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>
      getListClientGatewaysMethod() {
    // Lazily builds and caches the descriptor using double-checked locking on
    // the volatile static field; safe to race, built at most once.
    io.grpc.MethodDescriptor<
            com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest,
            com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>
        getListClientGatewaysMethod;
    if ((getListClientGatewaysMethod = ClientGatewaysServiceGrpc.getListClientGatewaysMethod)
        == null) {
      synchronized (ClientGatewaysServiceGrpc.class) {
        if ((getListClientGatewaysMethod = ClientGatewaysServiceGrpc.getListClientGatewaysMethod)
            == null) {
          ClientGatewaysServiceGrpc.getListClientGatewaysMethod =
              getListClientGatewaysMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest,
                          com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListClientGateways"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.beyondcorp.clientgateways.v1
                                  .ListClientGatewaysRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.beyondcorp.clientgateways.v1
                                  .ListClientGatewaysResponse.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ClientGatewaysServiceMethodDescriptorSupplier("ListClientGateways"))
                      .build();
        }
      }
    }
    return getListClientGatewaysMethod;
  }
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest,
          com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>
      getGetClientGatewayMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetClientGateway",
      requestType = com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest.class,
      responseType = com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest,
          com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>
      getGetClientGatewayMethod() {
    // Double-checked lazy initialization; see getListClientGatewaysMethod.
    io.grpc.MethodDescriptor<
            com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest,
            com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>
        getGetClientGatewayMethod;
    if ((getGetClientGatewayMethod = ClientGatewaysServiceGrpc.getGetClientGatewayMethod) == null) {
      synchronized (ClientGatewaysServiceGrpc.class) {
        if ((getGetClientGatewayMethod = ClientGatewaysServiceGrpc.getGetClientGatewayMethod)
            == null) {
          ClientGatewaysServiceGrpc.getGetClientGatewayMethod =
              getGetClientGatewayMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest,
                          com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetClientGateway"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ClientGatewaysServiceMethodDescriptorSupplier("GetClientGateway"))
                      .build();
        }
      }
    }
    return getGetClientGatewayMethod;
  }
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest,
          com.google.longrunning.Operation>
      getCreateClientGatewayMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateClientGateway",
      requestType = com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest,
          com.google.longrunning.Operation>
      getCreateClientGatewayMethod() {
    // Double-checked lazy initialization; returns a long-running Operation.
    io.grpc.MethodDescriptor<
            com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest,
            com.google.longrunning.Operation>
        getCreateClientGatewayMethod;
    if ((getCreateClientGatewayMethod = ClientGatewaysServiceGrpc.getCreateClientGatewayMethod)
        == null) {
      synchronized (ClientGatewaysServiceGrpc.class) {
        if ((getCreateClientGatewayMethod = ClientGatewaysServiceGrpc.getCreateClientGatewayMethod)
            == null) {
          ClientGatewaysServiceGrpc.getCreateClientGatewayMethod =
              getCreateClientGatewayMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "CreateClientGateway"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.beyondcorp.clientgateways.v1
                                  .CreateClientGatewayRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ClientGatewaysServiceMethodDescriptorSupplier("CreateClientGateway"))
                      .build();
        }
      }
    }
    return getCreateClientGatewayMethod;
  }
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest,
          com.google.longrunning.Operation>
      getDeleteClientGatewayMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteClientGateway",
      requestType = com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest,
          com.google.longrunning.Operation>
      getDeleteClientGatewayMethod() {
    // Double-checked lazy initialization; returns a long-running Operation.
    io.grpc.MethodDescriptor<
            com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest,
            com.google.longrunning.Operation>
        getDeleteClientGatewayMethod;
    if ((getDeleteClientGatewayMethod = ClientGatewaysServiceGrpc.getDeleteClientGatewayMethod)
        == null) {
      synchronized (ClientGatewaysServiceGrpc.class) {
        if ((getDeleteClientGatewayMethod = ClientGatewaysServiceGrpc.getDeleteClientGatewayMethod)
            == null) {
          ClientGatewaysServiceGrpc.getDeleteClientGatewayMethod =
              getDeleteClientGatewayMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "DeleteClientGateway"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.beyondcorp.clientgateways.v1
                                  .DeleteClientGatewayRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ClientGatewaysServiceMethodDescriptorSupplier("DeleteClientGateway"))
                      .build();
        }
      }
    }
    return getDeleteClientGatewayMethod;
  }
/** Creates a new async stub that supports all call types for the service */
public static ClientGatewaysServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceStub>() {
@java.lang.Override
public ClientGatewaysServiceStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ClientGatewaysServiceStub(channel, callOptions);
}
};
return ClientGatewaysServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static ClientGatewaysServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceBlockingV2Stub>() {
@java.lang.Override
public ClientGatewaysServiceBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ClientGatewaysServiceBlockingV2Stub(channel, callOptions);
}
};
return ClientGatewaysServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static ClientGatewaysServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceBlockingStub>() {
@java.lang.Override
public ClientGatewaysServiceBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ClientGatewaysServiceBlockingStub(channel, callOptions);
}
};
return ClientGatewaysServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static ClientGatewaysServiceFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ClientGatewaysServiceFutureStub>() {
@java.lang.Override
public ClientGatewaysServiceFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ClientGatewaysServiceFutureStub(channel, callOptions);
}
};
return ClientGatewaysServiceFutureStub.newStub(factory, channel);
}
  /**
   * Service contract for ClientGatewaysService. Every method has a default implementation
   * that reports UNIMPLEMENTED to the response observer, so implementors only need to
   * override the RPCs they actually support.
   *
   * <pre>
   * API Overview:
   * The `beyondcorp.googleapis.com` service implements the Google Cloud
   * BeyondCorp API.
   * Data Model:
   * The ClientGatewaysService exposes the following resources:
   * * Client Gateways, named as follows:
   *   `projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}`.
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Lists ClientGateways in a given project and location.
     * </pre>
     */
    default void listClientGateways(
        com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>
            responseObserver) {
      // Default: signal UNIMPLEMENTED to the caller.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListClientGatewaysMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets details of a single ClientGateway.
     * </pre>
     */
    default void getClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>
            responseObserver) {
      // Default: signal UNIMPLEMENTED to the caller.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getGetClientGatewayMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a new ClientGateway in a given project and location.
     * </pre>
     */
    default void createClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      // Default: signal UNIMPLEMENTED to the caller.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateClientGatewayMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a single ClientGateway.
     * </pre>
     */
    default void deleteClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      // Default: signal UNIMPLEMENTED to the caller.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteClientGatewayMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service ClientGatewaysService.
   * Subclasses override the {@link AsyncService} methods they implement; the rest keep
   * the UNIMPLEMENTED defaults.
   *
   * <pre>
   * API Overview:
   * The `beyondcorp.googleapis.com` service implements the Google Cloud
   * BeyondCorp API.
   * Data Model:
   * The ClientGatewaysService exposes the following resources:
   * * Client Gateways, named as follows:
   *   `projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}`.
   * </pre>
   */
  public abstract static class ClientGatewaysServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      // Delegates to the static bindService, which wires each RPC to a MethodHandlers entry.
      return ClientGatewaysServiceGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service ClientGatewaysService.
   * Results are delivered to the supplied {@code StreamObserver}; calls return immediately.
   *
   * <pre>
   * API Overview:
   * The `beyondcorp.googleapis.com` service implements the Google Cloud
   * BeyondCorp API.
   * Data Model:
   * The ClientGatewaysService exposes the following resources:
   * * Client Gateways, named as follows:
   *   `projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}`.
   * </pre>
   */
  public static final class ClientGatewaysServiceStub
      extends io.grpc.stub.AbstractAsyncStub<ClientGatewaysServiceStub> {

    private ClientGatewaysServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Called by AbstractStub when deriving a stub with modified CallOptions.
    @java.lang.Override
    protected ClientGatewaysServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ClientGatewaysServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Lists ClientGateways in a given project and location.
     * </pre>
     */
    public void listClientGateways(
        com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListClientGatewaysMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets details of a single ClientGateway.
     * </pre>
     */
    public void getClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetClientGatewayMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a new ClientGateway in a given project and location.
     * </pre>
     */
    public void createClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateClientGatewayMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a single ClientGateway.
     * </pre>
     */
    public void deleteClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteClientGatewayMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service ClientGatewaysService.
   * Each call blocks until the response (or error) arrives.
   *
   * <pre>
   * API Overview:
   * The `beyondcorp.googleapis.com` service implements the Google Cloud
   * BeyondCorp API.
   * Data Model:
   * The ClientGatewaysService exposes the following resources:
   * * Client Gateways, named as follows:
   *   `projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}`.
   * </pre>
   */
  public static final class ClientGatewaysServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<ClientGatewaysServiceBlockingV2Stub> {

    private ClientGatewaysServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Called by AbstractStub when deriving a stub with modified CallOptions.
    @java.lang.Override
    protected ClientGatewaysServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ClientGatewaysServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Lists ClientGateways in a given project and location.
     * </pre>
     */
    public com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse
        listClientGateways(
            com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListClientGatewaysMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets details of a single ClientGateway.
     * </pre>
     */
    public com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway getClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetClientGatewayMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a new ClientGateway in a given project and location.
     * </pre>
     */
    public com.google.longrunning.Operation createClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateClientGatewayMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a single ClientGateway.
     * </pre>
     */
    public com.google.longrunning.Operation deleteClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteClientGatewayMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service ClientGatewaysService.
   * Each call blocks until the response (or error) arrives.
   *
   * <pre>
   * API Overview:
   * The `beyondcorp.googleapis.com` service implements the Google Cloud
   * BeyondCorp API.
   * Data Model:
   * The ClientGatewaysService exposes the following resources:
   * * Client Gateways, named as follows:
   *   `projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}`.
   * </pre>
   */
  public static final class ClientGatewaysServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<ClientGatewaysServiceBlockingStub> {

    private ClientGatewaysServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Called by AbstractStub when deriving a stub with modified CallOptions.
    @java.lang.Override
    protected ClientGatewaysServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ClientGatewaysServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Lists ClientGateways in a given project and location.
     * </pre>
     */
    public com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse
        listClientGateways(
            com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListClientGatewaysMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets details of a single ClientGateway.
     * </pre>
     */
    public com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway getClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetClientGatewayMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a new ClientGateway in a given project and location.
     * </pre>
     */
    public com.google.longrunning.Operation createClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateClientGatewayMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a single ClientGateway.
     * </pre>
     */
    public com.google.longrunning.Operation deleteClientGateway(
        com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteClientGatewayMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service
   * ClientGatewaysService. Calls return immediately with a future for the response.
   *
   * <pre>
   * API Overview:
   * The `beyondcorp.googleapis.com` service implements the Google Cloud
   * BeyondCorp API.
   * Data Model:
   * The ClientGatewaysService exposes the following resources:
   * * Client Gateways, named as follows:
   *   `projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}`.
   * </pre>
   */
  public static final class ClientGatewaysServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<ClientGatewaysServiceFutureStub> {

    private ClientGatewaysServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Called by AbstractStub when deriving a stub with modified CallOptions.
    @java.lang.Override
    protected ClientGatewaysServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ClientGatewaysServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Lists ClientGateways in a given project and location.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>
        listClientGateways(
            com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListClientGatewaysMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Gets details of a single ClientGateway.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>
        getClientGateway(
            com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetClientGatewayMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a new ClientGateway in a given project and location.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        createClientGateway(
            com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateClientGatewayMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a single ClientGateway.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        deleteClientGateway(
            com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteClientGatewayMethod(), getCallOptions()), request);
    }
  }
  // Ordinals used by MethodHandlers.invoke to dispatch to the matching AsyncService method.
  private static final int METHODID_LIST_CLIENT_GATEWAYS = 0;
  private static final int METHODID_GET_CLIENT_GATEWAY = 1;
  private static final int METHODID_CREATE_CLIENT_GATEWAY = 2;
  private static final int METHODID_DELETE_CLIENT_GATEWAY = 3;
  // Bridges gRPC server-call callbacks to the AsyncService implementation, dispatching on a
  // METHODID_* ordinal. The unchecked casts are safe because bindService pairs each handler
  // with the request/response types of its MethodDescriptor.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    // Unary / server-streaming entry point.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_LIST_CLIENT_GATEWAYS:
          serviceImpl.listClientGateways(
              (com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>)
                  responseObserver);
          break;
        case METHODID_GET_CLIENT_GATEWAY:
          serviceImpl.getClientGateway(
              (com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>)
                  responseObserver);
          break;
        case METHODID_CREATE_CLIENT_GATEWAY:
          serviceImpl.createClientGateway(
              (com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_DELETE_CLIENT_GATEWAY:
          serviceImpl.deleteClientGateway(
              (com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    // Client-streaming / bidi entry point; this service has no streaming-request methods,
    // so any call here is a programming error.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Builds the server-side service definition, registering one unary MethodHandlers
  // instance per RPC with its matching METHODID_* ordinal.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getListClientGatewaysMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysRequest,
                    com.google.cloud.beyondcorp.clientgateways.v1.ListClientGatewaysResponse>(
                    service, METHODID_LIST_CLIENT_GATEWAYS)))
        .addMethod(
            getGetClientGatewayMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.beyondcorp.clientgateways.v1.GetClientGatewayRequest,
                    com.google.cloud.beyondcorp.clientgateways.v1.ClientGateway>(
                    service, METHODID_GET_CLIENT_GATEWAY)))
        .addMethod(
            getCreateClientGatewayMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.beyondcorp.clientgateways.v1.CreateClientGatewayRequest,
                    com.google.longrunning.Operation>(service, METHODID_CREATE_CLIENT_GATEWAY)))
        .addMethod(
            getDeleteClientGatewayMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest,
                    com.google.longrunning.Operation>(service, METHODID_DELETE_CLIENT_GATEWAY)))
        .build();
  }
  // Common base for schema-descriptor suppliers: resolves the proto file descriptor and
  // the ClientGatewaysService service descriptor from the generated proto class.
  private abstract static class ClientGatewaysServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    ClientGatewaysServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysServiceProto
          .getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("ClientGatewaysService");
    }
  }
  // File-level schema descriptor supplier; attached to the ServiceDescriptor below.
  private static final class ClientGatewaysServiceFileDescriptorSupplier
      extends ClientGatewaysServiceBaseDescriptorSupplier {
    ClientGatewaysServiceFileDescriptorSupplier() {}
  }
private static final class ClientGatewaysServiceMethodDescriptorSupplier
extends ClientGatewaysServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
ClientGatewaysServiceMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
  // Lazily-initialized descriptor; volatile for the double-checked locking below.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Returns the service descriptor, building it on first use (double-checked locking).
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (ClientGatewaysServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new ClientGatewaysServiceFileDescriptorSupplier())
                      .addMethod(getListClientGatewaysMethod())
                      .addMethod(getGetClientGatewayMethod())
                      .addMethod(getCreateClientGatewayMethod())
                      .addMethod(getDeleteClientGatewayMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.royale.compiler.internal.targets;
import java.io.File;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Map.Entry;
import org.apache.royale.abc.ABCConstants;
import org.apache.royale.abc.instructionlist.InstructionList;
import org.apache.royale.abc.semantics.Label;
import org.apache.royale.abc.semantics.Name;
import org.apache.royale.abc.visitors.ITraitVisitor;
import org.apache.royale.abc.visitors.ITraitsVisitor;
import org.apache.royale.compiler.common.VersionInfo;
import org.apache.royale.compiler.config.RSLSettings;
import org.apache.royale.compiler.config.RSLSettings.RSLAndPolicyFileURLPair;
import org.apache.royale.compiler.constants.IASLanguageConstants;
import org.apache.royale.compiler.constants.IMetaAttributeConstants;
import org.apache.royale.compiler.definitions.IDefinition;
import org.apache.royale.compiler.definitions.metadata.IMetaTag;
import org.apache.royale.compiler.definitions.references.IResolvedQualifiersReference;
import org.apache.royale.compiler.definitions.references.ReferenceFactory;
import org.apache.royale.compiler.internal.abc.ClassGeneratorHelper;
import org.apache.royale.compiler.internal.as.codegen.LexicalScope;
import org.apache.royale.compiler.internal.config.FrameInfo;
import org.apache.royale.compiler.internal.definitions.ClassDefinition;
import org.apache.royale.compiler.internal.definitions.NamespaceDefinition;
import org.apache.royale.compiler.internal.projects.RoyaleProject;
import org.apache.royale.compiler.internal.targets.Target.DirectDependencies;
import org.apache.royale.compiler.mxml.IMXMLTypeConstants;
import org.apache.royale.compiler.problems.ICompilerProblem;
import org.apache.royale.compiler.problems.InvalidBackgroundColorProblem;
import org.apache.royale.compiler.problems.MissingSignedDigestProblem;
import org.apache.royale.compiler.problems.MissingUnsignedDigestProblem;
import org.apache.royale.compiler.targets.ITargetSettings;
import org.apache.royale.compiler.tree.as.IASNode;
import org.apache.royale.compiler.units.ICompilationUnit;
import org.apache.royale.compiler.units.requests.IFileScopeRequestResult;
import org.apache.royale.swc.ISWC;
import org.apache.royale.swc.ISWCDigest;
import org.apache.royale.swc.ISWCLibrary;
import org.apache.royale.swc.ISWCManager;
import org.apache.royale.swf.ISWF;
import org.apache.royale.swf.tags.ProductInfoTag;
import org.apache.royale.swf.tags.ProductInfoTag.Edition;
import org.apache.royale.swf.tags.ProductInfoTag.Product;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import static org.apache.royale.compiler.mxml.IMXMLLanguageConstants.*;
/**
* Delegate class used by Royale specific targets to generate Royale specific code.
* If we were writing this compiler in C++ this would be a mix-in class.
*/
public abstract class RoyaleTarget
{
public RoyaleTarget(ITargetSettings targetSettings, RoyaleProject project)
{
royaleProject = project;
this.targetSettings = targetSettings;
accessibleClassNames = new HashSet<String>();
}
    // Project being compiled; used throughout for workspace and definition lookups.
    protected final RoyaleProject royaleProject;

    // Settings that control code generation for this target (frame labels, etc.).
    private final ITargetSettings targetSettings;

    /**
     * {@link Set} of classes referenced from accessibility meta-data.
     * <p>
     * For Example:
     * <p>
     * {@code [AccessibilityClass(implementation="mx.accessibility.ButtonAccImpl")]}
     * <p>
     * This set is accumulated as we discover direct dependencies, which is not as
     * maintainable as I would like.
     */
    protected final HashSet<String> accessibleClassNames;
    /**
     * Codegen IFlexModuleFactory.callInContext();
     *
     * public final override function callInContext(fn:Function, thisArg:Object, argArray:Array, returns:Boolean=true) : *
     * {
     *     var ret : * = fn.apply(thisArg, argArray);
     *     if (returns) return ret;
     *     return;
     * }
     *
     * @param classGen helper used to add the generated method to the class being built
     * @param isOverride true if the generated method overrides a base class
     * method, false otherwise.
     */
    protected final void codegenCallInContextMethod(ClassGeneratorHelper classGen, boolean isOverride)
    {
        // Resolved reference to AS3's Function.apply.
        IResolvedQualifiersReference applyReference = ReferenceFactory.resolvedQualifierQualifiedReference(royaleProject.getWorkspace(),
                NamespaceDefinition.getAS3NamespaceDefinition(), "apply");

        InstructionList callInContext = new InstructionList();
        // Locals: 1=fn, 2=thisArg, 3=argArray, 4=returns (see AS3 source above).
        callInContext.addInstruction(ABCConstants.OP_getlocal1);
        callInContext.addInstruction(ABCConstants.OP_getlocal2);
        callInContext.addInstruction(ABCConstants.OP_getlocal3);
        // fn.apply(thisArg, argArray) — result left on the stack.
        callInContext.addInstruction(ABCConstants.OP_callproperty, new Object[] {applyReference.getMName(), 2});
        // Test the 'returns' flag.
        callInContext.addInstruction(ABCConstants.OP_getlocal, 4);
        Label callInContextReturnVoid = new Label();
        callInContext.addInstruction(ABCConstants.OP_iffalse, callInContextReturnVoid);
        callInContext.addInstruction(ABCConstants.OP_returnvalue);
        callInContext.labelNext(callInContextReturnVoid);
        // TODO This should be OP_returnvoid, but the Boolean default value
        // for the 'returns' parameter isn't defaulting to true.
        // Fix this after CMP-936 is fixed.
        callInContext.addInstruction(ABCConstants.OP_returnvalue);

        // Signature: (Function, Object, Array, Boolean), with Boolean defaulting to true.
        ImmutableList<Name> callInContextParams = new ImmutableList.Builder<Name>()
                .add(new Name(IASLanguageConstants.Function))
                .add(new Name(IASLanguageConstants.Object))
                .add(new Name(IASLanguageConstants.Array))
                .add(new Name(IASLanguageConstants.Boolean))
                .build();
        classGen.addITraitsMethod(new Name("callInContext"), callInContextParams, null,
                Collections.<Object> singletonList(Boolean.TRUE), false, true, isOverride, callInContext);
    }
    /**
     * Codegen IFlexModuleFactory.create() override public function create(...
     * params):Object { var mainClass : Class; if (params.length &lt;= 0) {
     * mainClass = getlex MainApplicationClass } else if (params[0] is String) {
     * mainClass = getDefinitionByName(params[0]) } else return
     * super.create.apply(this, params); if (!mainClass) return null; var
     * instance:Object = new mainClass(); if (instance is IFlexModule)
     * (IFlexModule(instance)).moduleFactory = this; return instance; }
     *
     * @param classGen helper used to add the generated method to the class being built
     * @param mainApplicationName {@link Name} that will refer to the main
     * application class at runtime. May not be null but a library.swf for a SWC
     * may pass in a {@link Name} that resolves to "Object" at runtime.
     * @param isFlexSDKInfo whether to emit the IFlexModule/moduleFactory wiring
     * (only done when IFlexModule resolves in the project).
     */
    protected final void codegenCreateMethod(ClassGeneratorHelper classGen, Name mainApplicationName, boolean isFlexSDKInfo)
    {
        // References resolved up front: Function.apply, flash.utils.getDefinitionByName,
        // and mx.core.IFlexModule (the latter may not resolve outside the Flex SDK).
        IResolvedQualifiersReference applyReference = ReferenceFactory.resolvedQualifierQualifiedReference(royaleProject.getWorkspace(),
                NamespaceDefinition.getAS3NamespaceDefinition(), "apply");
        IResolvedQualifiersReference getDefinitionByNameReference =
                ReferenceFactory.packageQualifiedReference(royaleProject.getWorkspace(), IASLanguageConstants.getDefinitionByName);
        IResolvedQualifiersReference iFlexModule =
                ReferenceFactory.packageQualifiedReference(royaleProject.getWorkspace(), IMXMLTypeConstants.IFlexModule);
        // Only emit the moduleFactory assignment when IFlexModule exists in this project.
        boolean codegenIFlexModule = iFlexModule.resolve(royaleProject) != null && isFlexSDKInfo;
        Name getDefinitionByName = getDefinitionByNameReference.getMName();

        InstructionList create = new InstructionList();
        // if (params.length <= 0) ...
        create.addInstruction(ABCConstants.OP_getlocal1);
        create.addInstruction(ABCConstants.OP_getproperty, new Name("length"));
        create.addInstruction(ABCConstants.OP_pushbyte, 0);
        Label createL1 = new Label();
        create.addInstruction(ABCConstants.OP_ifgt, createL1);
        // ... mainClass = MainApplicationClass
        create.addInstruction(ABCConstants.OP_findproperty, mainApplicationName);
        create.addInstruction(ABCConstants.OP_getproperty, mainApplicationName);
        Label createL3 = new Label();
        create.addInstruction(ABCConstants.OP_jump, createL3);
        // else if (params[0] is String) ...
        create.labelNext(createL1);
        create.addInstruction(ABCConstants.OP_getlocal1);
        create.addInstruction(ABCConstants.OP_getproperty, new Name("0"));
        create.addInstruction(ABCConstants.OP_istype, new Name("String"));
        Label createL2 = new Label();
        create.addInstruction(ABCConstants.OP_iffalse, createL2);
        // ... mainClass = getDefinitionByName(params[0])
        create.addInstruction(ABCConstants.OP_finddef, getDefinitionByName);
        create.addInstruction(ABCConstants.OP_getlocal1);
        create.addInstruction(ABCConstants.OP_getproperty, new Name("0"));
        create.addInstruction(ABCConstants.OP_callproperty, new Object[] {getDefinitionByName, 1});
        create.addInstruction(ABCConstants.OP_jump, createL3);
        // else return super.create.apply(this, params);
        create.labelNext(createL2);
        create.addInstruction(ABCConstants.OP_getlocal0);
        create.addInstruction(ABCConstants.OP_getsuper, new Name("create"));
        create.addInstruction(ABCConstants.OP_getlocal0);
        create.addInstruction(ABCConstants.OP_getlocal1);
        create.addInstruction(ABCConstants.OP_callproperty, new Object[] {applyReference.getMName(), 2});
        create.addInstruction(ABCConstants.OP_returnvalue);
        // if (!mainClass) return null; else instance = new mainClass();
        create.labelNext(createL3);
        create.addInstruction(ABCConstants.OP_astype, new Name("Class"));
        create.addInstruction(ABCConstants.OP_dup);
        Label createL5 = new Label();
        create.addInstruction(ABCConstants.OP_iffalse, createL5);
        create.addInstruction(ABCConstants.OP_construct, 0);
        if (codegenIFlexModule)
        {
            // if (instance is IFlexModule) instance.moduleFactory = this;
            create.addInstruction(ABCConstants.OP_dup);
            create.addInstruction(ABCConstants.OP_istype, iFlexModule.getMName());
            Label createL4 = new Label();
            create.addInstruction(ABCConstants.OP_iffalse, createL4);
            create.addInstruction(ABCConstants.OP_dup);
            create.addInstruction(ABCConstants.OP_getlocal0);
            create.addInstruction(ABCConstants.OP_setproperty, new Name("moduleFactory"));
            create.labelNext(createL4);
            create.addInstruction(ABCConstants.OP_returnvalue);
        }
        create.labelNext(createL5);
        create.addInstruction(ABCConstants.OP_returnvalue);

        // public override function create(...):Object — varargs, needsRest=true.
        classGen.addITraitsMethod(new Name("create"), Collections.<Name> emptyList(),
                new Name("Object"), Collections.emptyList(), true, true, true, create);
    }
/**
* Codegen the IFlexModuleFactory.info() method.
*
* public override final function info() : Object
* {
* if (!_info)
* {
* _info = {
* currentDomain : ApplicationDomain.currentDomain
* .
* .
* .
* }
* }
* }
*
* @param classGen used to create the function
* @param compatibilityVersion compatibility version set in the compiler option.
* May be null if not configured.
* @param mainClassQName qName of the main class of the application. May be null
* of library swfs.
* @param preloaderReference reference to the configured preloader class.
* May be null for library swfs.
* @param runtimeDPIProviderReference configured runtimeDPIProvider. May be null
* for library swfs.
* @param splashScreen reference to the configured splash screen.
* @param rootNode root node of the application. May be null for library swfs.
* @param compiledLocales the locales supported by this application. May be null for
* library swfs.
* @param rsls legacy RSLs. May be null for library swfs.
* @param problemCollection problems found when generating the info method
* are added to the collection.
* @param remoteClassAliasMap
* @throws InterruptedException
*/
    protected final void codegenInfoMethod(ClassGeneratorHelper classGen,
            Integer compatibilityVersion,
            String mainClassQName,
            IResolvedQualifiersReference preloaderReference,
            IResolvedQualifiersReference runtimeDPIProviderReference,
            FlexSplashScreenImage splashScreen,
            IASNode rootNode,
            ITargetAttributes targetAttributes,
            Collection<String> compiledLocales,
            RoyaleFrame1Info frame1Info,
            Set<String> accessibilityClassNames,
            String royaleInitClassName,
            String stylesClassName,
            List<String> rsls,
            FlexRSLInfo rslInfo,
            Collection<ICompilerProblem> problemCollection,
            boolean isAppFlexInfo,
            boolean isFlexSDKInfo,
            Map<ClassDefinition, String> remoteClassAliasMap)
            throws InterruptedException
    {
        // Emits the IFlexModuleFactory "info()" method.  The generated method
        // lazily builds an Object literal (name/value pairs pushed on the AVM2
        // operand stack, then collapsed by OP_newobject) the first time it is
        // called, caches it in a slot, and returns the cached object thereafter.
        IResolvedQualifiersReference applicationDomainRef = ReferenceFactory.packageQualifiedReference(royaleProject.getWorkspace(),
                IASLanguageConstants.ApplicationDomain);
        // Choose the slot that caches the built info object: a static-protected
        // "_info" slot for application-style info, otherwise a private "info" slot.
        IResolvedQualifiersReference infoSlotReference;
        if (isAppFlexInfo)
        {
            NamespaceDefinition.IStaticProtectedNamespaceDefinition staticNSDef = NamespaceDefinition.createStaticProtectedNamespaceDefinition("");
            infoSlotReference = ReferenceFactory.resolvedQualifierQualifiedReference(royaleProject.getWorkspace(),
                    staticNSDef, "_info");
        }
        else
        {
            NamespaceDefinition.IPrivateNamespaceDefinition privateNSDef = NamespaceDefinition.createPrivateNamespaceDefinition("");
            infoSlotReference = ReferenceFactory.resolvedQualifierQualifiedReference(royaleProject.getWorkspace(),
                    privateNSDef, "info");
        }
        Name infoSlotName = infoSlotReference.getMName();
        InstructionList info = new InstructionList();
        // Lazy-initialization check: if the slot already holds a non-null info
        // object, jump straight to the return at infoL1.
        info.addInstruction(ABCConstants.OP_getlocal0);
        info.addInstruction(ABCConstants.OP_getproperty, infoSlotName);
        info.addInstruction(ABCConstants.OP_dup);
        Label infoL1 = new Label();
        info.addInstruction(ABCConstants.OP_iftrue, infoL1);
        // Number of name/value pairs pushed so far; consumed by OP_newobject below.
        int infoEntries = 0;
        // currentDomain:
        info.addInstruction(ABCConstants.OP_pop);
        info.addInstruction(ABCConstants.OP_pushstring, "currentDomain");
        info.addInstruction(ABCConstants.OP_getlex, applicationDomainRef.getMName());
        info.addInstruction(ABCConstants.OP_getproperty, new Name("currentDomain"));
        infoEntries++;
        // frames: an object mapping each frame label to its first frame class.
        if (targetSettings.getFrameLabels() != null && !targetSettings.getFrameLabels().isEmpty())
        {
            Collection<FrameInfo> frames = targetSettings.getFrameLabels();
            info.addInstruction(ABCConstants.OP_pushstring, "frames");
            for (FrameInfo frame : frames)
            {
                info.addInstruction(ABCConstants.OP_pushstring, frame.getLabel());
                info.addInstruction(ABCConstants.OP_convert_s);
                info.addInstruction(ABCConstants.OP_pushstring, frame.getFrameClasses().get(0));
            }
            info.addInstruction(ABCConstants.OP_newobject, frames.size());
            infoEntries++;
        }
        // royaleVersion:
        if (compatibilityVersion != null)
        {
            info.addInstruction(ABCConstants.OP_pushstring, "royaleVersion");
            // NOTE(review): compatibilityVersion is an Integer handed to
            // OP_pushstring — presumably the ABC emitter stringifies non-String
            // operands; confirm, otherwise this should be String.valueOf(...).
            info.addInstruction(ABCConstants.OP_pushstring, compatibilityVersion);
            infoEntries++;
        }
        // mark this SWF as being built with Royale
        info.addInstruction(ABCConstants.OP_pushstring, "isMXMLC");
        info.addInstruction(ABCConstants.OP_pushfalse);
        infoEntries++;
        // mainClassName:
        if (mainClassQName != null)
        {
            info.addInstruction(ABCConstants.OP_pushstring, "mainClassName");
            info.addInstruction(ABCConstants.OP_pushstring, mainClassQName);
            infoEntries++;
        }
        // Entries only emitted for Flex-SDK-style (non-application-info) SWFs.
        if (!isAppFlexInfo && isFlexSDKInfo)
        {
            // preloader: emitted only if the preloader class actually resolves.
            if (preloaderReference != null && preloaderReference.resolve(royaleProject) != null)
            {
                info.addInstruction(ABCConstants.OP_pushstring, ATTRIBUTE_PRELOADER);
                info.addInstruction(ABCConstants.OP_getlex, preloaderReference.getMName());
                infoEntries++;
            }
            // runtimeDPIProvider:
            if (runtimeDPIProviderReference != null && runtimeDPIProviderReference.resolve(royaleProject) != null)
            {
                info.addInstruction(ABCConstants.OP_pushstring, ATTRIBUTE_RUNTIME_DPI_PROVIDER);
                info.addInstruction(ABCConstants.OP_getlex, runtimeDPIProviderReference.getMName());
                infoEntries++;
            }
            // splashScreenImage: class generated for the embedded splash image.
            if (splashScreen.generatedEmbedClassReference != null)
            {
                info.addInstruction(ABCConstants.OP_pushstring, ATTRIBUTE_SPLASH_SCREEN_IMAGE);
                info.addInstruction(ABCConstants.OP_getlex, splashScreen.generatedEmbedClassReference.getMName());
                infoEntries++;
            }
            // Add various root node attributes:
            infoEntries += codegenRootNodeAttributes(targetAttributes, info, rootNode, problemCollection);
            // compiledLocales: array of locale strings.
            if (compiledLocales != null)
            {
                info.addInstruction(ABCConstants.OP_pushstring, "compiledLocales");
                for(String locale : compiledLocales)
                    info.addInstruction(ABCConstants.OP_pushstring, locale);
                info.addInstruction(ABCConstants.OP_newarray, compiledLocales.size());
                infoEntries++;
            }
            // compiledResourceBundleNames: array of bundle name strings.
            if (!frame1Info.compiledResourceBundleNames.isEmpty())
            {
                info.addInstruction(ABCConstants.OP_pushstring, "compiledResourceBundleNames");
                for(String bundleName : frame1Info.compiledResourceBundleNames)
                    info.addInstruction(ABCConstants.OP_pushstring, bundleName);
                info.addInstruction(ABCConstants.OP_newarray, frame1Info.compiledResourceBundleNames.size());
                infoEntries++;
            }
        }
        // styleDataClassName
        if (stylesClassName != null)
        {
            info.addInstruction(ABCConstants.OP_pushstring, "styleDataClassName");
            info.addInstruction(ABCConstants.OP_pushstring, stylesClassName);
            infoEntries++;
        }
        // mixins: array of the init class plus every mixin class name.
        if (royaleInitClassName != null)
        {
            info.addInstruction(ABCConstants.OP_pushstring, "mixins");
            info.addInstruction(ABCConstants.OP_pushstring, royaleInitClassName);
            int mixinEntries = 1;
            final Set<String> mixinClassNames = frame1Info.getMixins();
            for (String className : frame1Info.getMixins())
                info.addInstruction(ABCConstants.OP_pushstring, className);
            mixinEntries += mixinClassNames.size();
            info.addInstruction(ABCConstants.OP_newarray, mixinEntries);
            infoEntries++;
        }
        // remoteClassAliases: object mapping qualified class name -> alias.
        if (remoteClassAliasMap != null && !remoteClassAliasMap.isEmpty())
        {
            info.addInstruction(ABCConstants.OP_pushstring, "remoteClassAliases");
            for (Map.Entry<ClassDefinition, String> classAliasEntry : remoteClassAliasMap.entrySet())
            {
                info.addInstruction(ABCConstants.OP_pushstring, classAliasEntry.getKey().getQualifiedName());
                info.addInstruction(ABCConstants.OP_convert_s);
                String value = classAliasEntry.getValue();
                info.addInstruction(ABCConstants.OP_pushstring, value);
            }
            info.addInstruction(ABCConstants.OP_newobject, remoteClassAliasMap.size());
            infoEntries++;
        }
        // fonts: object mapping font name -> {regular, bold, italic, boldItalic}.
        if (!frame1Info.embeddedFonts.isEmpty())
        {
            info.addInstruction(ABCConstants.OP_pushstring, "fonts");
            for (Entry<String, RoyaleFontInfo> entry : frame1Info.embeddedFonts.entrySet())
            {
                info.addInstruction(ABCConstants.OP_pushstring, entry.getKey());
                info.addInstruction(ABCConstants.OP_convert_s);
                RoyaleFontInfo fontInfo = entry.getValue();
                info.addInstruction(ABCConstants.OP_pushstring, "regular");
                info.addInstruction(fontInfo.regularOp);
                info.addInstruction(ABCConstants.OP_pushstring, "bold");
                info.addInstruction(fontInfo.boldOp);
                info.addInstruction(ABCConstants.OP_pushstring, "italic");
                info.addInstruction(fontInfo.italicOp);
                info.addInstruction(ABCConstants.OP_pushstring, "boldItalic");
                info.addInstruction(fontInfo.boldItalicOp);
                info.addInstruction(ABCConstants.OP_newobject, 4);
            }
            info.addInstruction(ABCConstants.OP_newobject, frame1Info.embeddedFonts.size());
            infoEntries++;
        }
        // accessibility classes
        if (accessibilityClassNames != null && accessibilityClassNames.size() > 0)
        {
            info.addInstruction(ABCConstants.OP_pushstring, "accessibilityClassNames");
            for (String className : accessibilityClassNames)
                info.addInstruction(ABCConstants.OP_pushstring, className);
            info.addInstruction(ABCConstants.OP_newarray, accessibilityClassNames.size());
            infoEntries++;
        }
        // cdRSLs and placeholderRSLs:
        if (!rslInfo.requiredRSLs.isEmpty())
        {
            // Note: The Flex framework spells this info property as Rsl, not RSL.
            if (codegenRSLsEntry(info, problemCollection, "cdRsls", rslInfo.requiredRSLs))
                infoEntries++;
            // Note: The Flex framework spells this info property as Rsl, not RSL.
            if (codegenRSLsEntry(info, problemCollection, "placeholderRsls", rslInfo.placeholderRSLs))
                infoEntries++;
        }
        // rsls:
        if (codegenLegacyRSLs(info, rsls))
            infoEntries++;
        // Create a new info object from all of the entries.
        info.addInstruction(ABCConstants.OP_newobject, infoEntries);
        // Cache the freshly built object in the info slot before returning it.
        info.addInstruction(ABCConstants.OP_dup);
        info.addInstruction(ABCConstants.OP_getlocal0);
        info.addInstruction(ABCConstants.OP_swap);
        info.addInstruction(ABCConstants.OP_setproperty, infoSlotName);
        info.labelNext(infoL1);
        info.addInstruction(ABCConstants.OP_returnvalue);
        // Attach the method and the backing slot as either class traits (static)
        // or instance traits, depending on the kind of info being generated.
        ITraitsVisitor itraitsVisitor;
        ITraitVisitor infoSlotVisitor;
        if (isAppFlexInfo)
        {
            classGen.addCTraitsMethod(new Name("info"), Collections.<Name> emptyList(),
                    new Name("Object"), Collections.emptyList(), false, info);
            itraitsVisitor = classGen.getCTraitsVisitor();
            infoSlotVisitor = itraitsVisitor.visitSlotTrait(ABCConstants.TRAIT_Var, infoSlotName,
                    ITraitsVisitor.RUNTIME_SLOT, new Name(IASLanguageConstants.Object), LexicalScope.noInitializer);
        }
        else
        {
            classGen.addITraitsMethod(new Name("info"), Collections.<Name> emptyList(),
                    new Name("Object"), Collections.emptyList(), false, true, true, info);
            itraitsVisitor = classGen.getITraitsVisitor();
            infoSlotVisitor = itraitsVisitor.visitSlotTrait(ABCConstants.TRAIT_Var, infoSlotName,
                    ITraitsVisitor.RUNTIME_SLOT, new Name(IASLanguageConstants.Object), LexicalScope.noInitializer);
        }
        infoSlotVisitor.visitStart();
        infoSlotVisitor.visitEnd();
    }
    /**
     * Generate code to add root node attributes to the info object that haven't
     * already been handled.
     *
     * @param targetAttributes the attributes declared on the root tag of the target.
     * @param info the instruction list being built for the generated info() method.
     * @param rootNode the root node of the main application, used for problem reporting.
     * @param problemCollection problems found while generating the attributes are added here.
     * @return number of entries added to the info object
     * @throws InterruptedException
     */
private int codegenRootNodeAttributes(ITargetAttributes targetAttributes, InstructionList info,
IASNode rootNode,
Collection<ICompilerProblem> problemCollection) throws InterruptedException
{
// Number of attributes added to the "info" object.
int entries = 0;
// Emit "info" attributes that don't require special processing.
final Map<String, String> attributes = targetAttributes.getRootInfoAttributes();
for (final Map.Entry<String, String> e : attributes.entrySet())
{
info.addInstruction(ABCConstants.OP_pushstring, e.getKey());
info.addInstruction(ABCConstants.OP_pushstring, e.getValue());
entries++;
}
// Emit "info.usePreloader" as a boolean value.
final Boolean usePreloader = targetAttributes.getUsePreloader();
if (usePreloader != null)
{
info.addInstruction(ABCConstants.OP_pushstring, ATTRIBUTE_USE_PRELOADER);
info.addInstruction(usePreloader ? ABCConstants.OP_pushtrue : ABCConstants.OP_pushfalse);
entries++;
}
// Emit "info.backgroundColor" as a hex string value.
final String backgroundColorString = targetAttributes.getBackgroundColor();
if (backgroundColorString != null)
{
try
{
final int backgroundColor = royaleProject.getColorAsInt(backgroundColorString);
final String hexString = "0x" + Integer.toHexString(backgroundColor).toUpperCase();
info.addInstruction(ABCConstants.OP_pushstring, ATTRIBUTE_BACKGROUND_COLOR);
info.addInstruction(ABCConstants.OP_pushstring, hexString);
entries++;
}
catch (NumberFormatException numberFormatExpression)
{
problemCollection.add(new InvalidBackgroundColorProblem(
rootNode.getFileSpecification().getPath(),
backgroundColorString));
}
}
return entries;
}
/**
* Generate either the "cdRsls" or "placeholderRsls" entry in the
* IFlexModuleFactory info function.
*
* @param info
* @param problemCollection
* @param entryLabel the "info" entry label
* @param rslSettingsList the rsls for the entry
*
* @return true if instructions were added for the RSLs, false otherwise.
*/
private boolean codegenRSLsEntry(InstructionList info, Collection<ICompilerProblem> problemCollection,
String entryLabel,
List<RSLSettings> rslSettingsList)
{
ISWCManager swcManager = royaleProject.getWorkspace().getSWCManager();
info.addInstruction(ABCConstants.OP_pushstring, entryLabel);
IResolvedQualifiersReference mxCoreRSLDataReference =
ReferenceFactory.packageQualifiedReference(royaleProject.getWorkspace(), IMXMLTypeConstants.RSLData);
Name mxCoreRSLDataSlotName = mxCoreRSLDataReference.getMName();
Object[] mxCoreRSLDataCtor = new Object[] { mxCoreRSLDataSlotName, 7 };
// Add an RSLData instance to an array for the primary RSL in RSLSettings
// plus one for every failover RSL.
for (RSLSettings rslSettings : rslSettingsList)
{
int rslCount = 0;
for (RSLAndPolicyFileURLPair urls : rslSettings.getRSLURLs())
{
info.addInstruction(ABCConstants.OP_findpropstrict, mxCoreRSLDataSlotName);
info.addInstruction(ABCConstants.OP_pushstring, urls.getRSLURL());
info.addInstruction(ABCConstants.OP_pushstring, urls.getPolicyFileURL());
ISWC swc = swcManager.get(new File(rslSettings.getLibraryFile().getPath()));
boolean isSignedRSL = RSLSettings.isSignedRSL(urls.getRSLURL());
ISWCDigest swcDigest = getSWCDigest(Iterables.getFirst(swc.getLibraries(), null),
isSignedRSL);
if (swcDigest == null)
{
if (isSignedRSL)
problemCollection.add(new MissingSignedDigestProblem(swc.getSWCFile().getAbsolutePath()));
else
problemCollection.add(new MissingUnsignedDigestProblem(swc.getSWCFile().getAbsolutePath()));
continue;
}
info.addInstruction(ABCConstants.OP_pushstring, swcDigest.getValue());
info.addInstruction(ABCConstants.OP_pushstring, swcDigest.getType());
info.addInstruction(isSignedRSL ? ABCConstants.OP_pushtrue : ABCConstants.OP_pushfalse);
info.addInstruction(rslSettings.getVerifyDigest() ? ABCConstants.OP_pushtrue : ABCConstants.OP_pushfalse);
info.addInstruction(ABCConstants.OP_pushstring,
rslSettings.getApplicationDomain().getApplicationDomainValue());
info.addInstruction(ABCConstants.OP_constructprop, mxCoreRSLDataCtor);
rslCount++;
}
info.addInstruction(ABCConstants.OP_newarray, rslCount);
}
info.addInstruction(ABCConstants.OP_newarray, rslSettingsList.size());
return true;
}
/**
* Get either a signed or unsigned digest from the library.
*
* @param swcLibrary
* @param signedDigest if true return a signed digest, otherwise an unsigned digest.
* @return A signed or unsigned digest. Null if the requested digest is not
* available.
*/
private ISWCDigest getSWCDigest(ISWCLibrary swcLibrary, boolean signedDigest)
{
for (ISWCDigest swcDigest : swcLibrary.getDigests())
{
if (swcDigest.isSigned() == signedDigest)
return swcDigest;
}
return null;
}
/**
* rsls: [{url: "rsl1.swf", size: -1}, {url: "rsl2.swf", size: -1}, {url: "rsl3.swf", size: -1}]
*
* @param info
* @return
*/
private boolean codegenLegacyRSLs(InstructionList info, List<String> rsls)
{
if (rsls != null && rsls.size() > 0)
{
info.addInstruction(ABCConstants.OP_pushstring, "rsls");
for (String rsl : rsls)
{
info.addInstruction(ABCConstants.OP_pushstring, "url");
info.addInstruction(ABCConstants.OP_pushstring, rsl);
info.addInstruction(ABCConstants.OP_pushstring, "size");
info.addInstruction(ABCConstants.OP_pushbyte, -1);
info.addInstruction(ABCConstants.OP_newobject, 2);
}
info.addInstruction(ABCConstants.OP_newarray, rsls.size());
}
return rsls != null && rsls.size() > 0;
}
    // Shared immutable "no direct dependencies, no problems" result, used to
    // avoid allocating an empty DirectDependencies per call site.
    static final Target.DirectDependencies NO_DEPENDENCIES =
        new Target.DirectDependencies(Collections.<ICompilationUnit>emptyList(), Collections.<ICompilerProblem>emptyList());
    /**
     * Find the accessible compilation units referenced by a compilation unit.
     * The accessible compilation units are gathered from the
     * {@code [AccessibilityClass]} meta-data on a class.
     * <p>
     * Unfortunately this method has a side effect: it updates the set of
     * accessibility classes.
     *
     * @param compilationUnit A compilation unit.
     * @return A {@link DirectDependencies} object with the set of compilation
     * units representing the new compilation units needed for accessibility. If
     * no accessible compilation units are found or accessibility is not enabled
     * on the target, then the returned {@link DirectDependencies} object will
     * contain an empty set.
     */
public Target.DirectDependencies getAccessibilityDependencies(ICompilationUnit compilationUnit) throws InterruptedException
{
assert targetSettings.isAccessible() : "This method should only be called if accessibility is enabled!";
Set<ICompilationUnit> accessibleCompilationUnits = new HashSet<ICompilationUnit>();
IFileScopeRequestResult result = compilationUnit.getFileScopeRequest().get();
for(IDefinition def : result.getExternallyVisibleDefinitions())
{
IMetaTag md = def.getMetaTagByName(IMetaAttributeConstants.ATTRIBUTE_ACCESSIBIlITY_CLASS);
if (md == null)
continue;
String accessibilityClass = md.getAttributeValue(IMetaAttributeConstants.NAME_ACCESSIBILITY_IMPLEMENTATION);
if (accessibilityClass == null)
continue;
IResolvedQualifiersReference ref = ReferenceFactory.packageQualifiedReference(royaleProject.getWorkspace(),
accessibilityClass);
assert ref != null;
// Collect a list of classes to add to the info structure.
accessibleClassNames.add(accessibilityClass);
IDefinition accessibilityClassDefinition = ref.resolve(royaleProject);
if ((accessibilityClassDefinition != null) && (!accessibilityClassDefinition.isImplicit()))
{
ICompilationUnit cu = royaleProject.getScope().getCompilationUnitForDefinition(accessibilityClassDefinition);
assert cu != null : "Unable to find compilation unit for definition!";
accessibleCompilationUnits.add(cu);
}
}
return new Target.DirectDependencies(accessibleCompilationUnits, Collections.<ICompilerProblem>emptyList());
}
/**
* Update the SWF model by adding a ProductInfoTag.
*
* @param swf the swf model to update.
*/
public void addProductInfoToSWF(ISWF swf)
{
long compileDate = new Date().getTime();
String rdfDate = targetSettings.getSWFMetadataDate();
String rdfDateFormat = targetSettings.getSWFMetadataDateFormat();
if (rdfDate != null && rdfDateFormat != null)
{
try {
SimpleDateFormat sdf = new SimpleDateFormat(rdfDateFormat);
sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
compileDate = sdf.parse(rdfDate).getTime();
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalArgumentException e1) {
e1.printStackTrace();
}
}
// Add product info to the swf.
ProductInfoTag productInfo = new ProductInfoTag(Product.ROYALE,
Edition.NONE,
(byte)Integer.parseInt(VersionInfo.FLEX_MAJOR_VERSION),
(byte)Integer.parseInt(VersionInfo.FLEX_MINOR_VERSION),
VersionInfo.getBuildLong(),
compileDate);
swf.setProductInfo(productInfo);
}
}
|
googleapis/google-cloud-java | 35,922 | java-dataflow/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMessage.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/dataflow/v1beta3/messages.proto
// Protobuf Java Version: 3.25.8
package com.google.dataflow.v1beta3;
/**
*
*
* <pre>
* A particular message pertaining to a Dataflow job.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.JobMessage}
*/
public final class JobMessage extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.JobMessage)
JobMessageOrBuilder {
private static final long serialVersionUID = 0L;
// Use JobMessage.newBuilder() to construct.
private JobMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private JobMessage() {
id_ = "";
messageText_ = "";
messageImportance_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new JobMessage();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.MessagesProto
.internal_static_google_dataflow_v1beta3_JobMessage_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.MessagesProto
.internal_static_google_dataflow_v1beta3_JobMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.JobMessage.class,
com.google.dataflow.v1beta3.JobMessage.Builder.class);
}
private int bitField0_;
public static final int ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object id_ = "";
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The id.
*/
  @java.lang.Override
  public java.lang.String getId() {
    // The field is stored either as a String or as the wire-format ByteString;
    // decode lazily on first access and cache the decoded String.
    java.lang.Object ref = id_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      id_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The bytes for id.
*/
@java.lang.Override
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TIME_FIELD_NUMBER = 2;
private com.google.protobuf.Timestamp time_;
/**
*
*
* <pre>
* The timestamp of the message.
* </pre>
*
* <code>.google.protobuf.Timestamp time = 2;</code>
*
* @return Whether the time field is set.
*/
@java.lang.Override
public boolean hasTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The timestamp of the message.
* </pre>
*
* <code>.google.protobuf.Timestamp time = 2;</code>
*
* @return The time.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getTime() {
return time_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : time_;
}
/**
*
*
* <pre>
* The timestamp of the message.
* </pre>
*
* <code>.google.protobuf.Timestamp time = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getTimeOrBuilder() {
return time_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : time_;
}
public static final int MESSAGE_TEXT_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object messageText_ = "";
/**
*
*
* <pre>
* The text of the message.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @return The messageText.
*/
@java.lang.Override
public java.lang.String getMessageText() {
java.lang.Object ref = messageText_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
messageText_ = s;
return s;
}
}
/**
*
*
* <pre>
* The text of the message.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @return The bytes for messageText.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMessageTextBytes() {
java.lang.Object ref = messageText_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
messageText_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MESSAGE_IMPORTANCE_FIELD_NUMBER = 4;
private int messageImportance_ = 0;
/**
*
*
* <pre>
* Importance level of the message.
* </pre>
*
* <code>.google.dataflow.v1beta3.JobMessageImportance message_importance = 4;</code>
*
* @return The enum numeric value on the wire for messageImportance.
*/
@java.lang.Override
public int getMessageImportanceValue() {
return messageImportance_;
}
/**
*
*
* <pre>
* Importance level of the message.
* </pre>
*
* <code>.google.dataflow.v1beta3.JobMessageImportance message_importance = 4;</code>
*
* @return The messageImportance.
*/
@java.lang.Override
public com.google.dataflow.v1beta3.JobMessageImportance getMessageImportance() {
com.google.dataflow.v1beta3.JobMessageImportance result =
com.google.dataflow.v1beta3.JobMessageImportance.forNumber(messageImportance_);
return result == null ? com.google.dataflow.v1beta3.JobMessageImportance.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: only non-default fields are serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
    }
    // The "time" submessage is written only when its presence bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getTime());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(messageText_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, messageText_);
    }
    // Enum is skipped when it equals the zero value (UNKNOWN).
    if (messageImportance_
        != com.google.dataflow.v1beta3.JobMessageImportance.JOB_MESSAGE_IMPORTANCE_UNKNOWN
            .getNumber()) {
      output.writeEnum(4, messageImportance_);
    }
    // Preserve any fields this binary didn't know about when parsing.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means "not yet computed".  Field-by-field logic
    // mirrors writeTo() so the computed size matches what is serialized.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTime());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(messageText_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, messageText_);
    }
    if (messageImportance_
        != com.google.dataflow.v1beta3.JobMessageImportance.JOB_MESSAGE_IMPORTANCE_UNKNOWN
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, messageImportance_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.dataflow.v1beta3.JobMessage)) {
      return super.equals(obj);
    }
    com.google.dataflow.v1beta3.JobMessage other = (com.google.dataflow.v1beta3.JobMessage) obj;
    // Field-wise comparison; "time" presence must match before values are compared.
    if (!getId().equals(other.getId())) return false;
    if (hasTime() != other.hasTime()) return false;
    if (hasTime()) {
      if (!getTime().equals(other.getTime())) return false;
    }
    if (!getMessageText().equals(other.getMessageText())) return false;
    if (messageImportance_ != other.messageImportance_) return false;
    // Unknown fields participate in equality too.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 is the "not yet computed" sentinel (the mixing scheme below
    // cannot produce 0 for a computed hash in practice).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ID_FIELD_NUMBER;
    hash = (53 * hash) + getId().hashCode();
    // Optional submessage contributes only when present, matching equals().
    if (hasTime()) {
      hash = (37 * hash) + TIME_FIELD_NUMBER;
      hash = (53 * hash) + getTime().hashCode();
    }
    hash = (37 * hash) + MESSAGE_TEXT_FIELD_NUMBER;
    hash = (53 * hash) + getMessageText().hashCode();
    hash = (37 * hash) + MESSAGE_IMPORTANCE_FIELD_NUMBER;
    hash = (53 * hash) + messageImportance_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.dataflow.v1beta3.JobMessage parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.JobMessage parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.JobMessage parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.JobMessage parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.dataflow.v1beta3.JobMessage prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A particular message pertaining to a Dataflow job.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.JobMessage}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.JobMessage)
com.google.dataflow.v1beta3.JobMessageOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.MessagesProto
.internal_static_google_dataflow_v1beta3_JobMessage_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.MessagesProto
.internal_static_google_dataflow_v1beta3_JobMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.JobMessage.class,
com.google.dataflow.v1beta3.JobMessage.Builder.class);
}
// Construct using com.google.dataflow.v1beta3.JobMessage.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
id_ = "";
time_ = null;
if (timeBuilder_ != null) {
timeBuilder_.dispose();
timeBuilder_ = null;
}
messageText_ = "";
messageImportance_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.dataflow.v1beta3.MessagesProto
.internal_static_google_dataflow_v1beta3_JobMessage_descriptor;
}
@java.lang.Override
public com.google.dataflow.v1beta3.JobMessage getDefaultInstanceForType() {
return com.google.dataflow.v1beta3.JobMessage.getDefaultInstance();
}
@java.lang.Override
public com.google.dataflow.v1beta3.JobMessage build() {
com.google.dataflow.v1beta3.JobMessage result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.dataflow.v1beta3.JobMessage buildPartial() {
com.google.dataflow.v1beta3.JobMessage result =
new com.google.dataflow.v1beta3.JobMessage(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies builder fields whose "set" bit is on into the message being built,
    // translating builder bitField positions into the message's presence bits
    // (only "time", bit 0x2 here, maps to a message presence bit, 0x1 there).
    private void buildPartial0(com.google.dataflow.v1beta3.JobMessage result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.id_ = id_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        // Prefer the nested builder's built value when one exists.
        result.time_ = timeBuilder_ == null ? time_ : timeBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.messageText_ = messageText_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.messageImportance_ = messageImportance_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflection-based field mutators: these simply delegate to the
    // GeneratedMessageV3.Builder base class and are overridden only to
    // narrow the return type to this Builder for call chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: routes to the type-specific merge when `other` is a
    // JobMessage, otherwise falls back to reflective field-by-field merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.dataflow.v1beta3.JobMessage) {
        return mergeFrom((com.google.dataflow.v1beta3.JobMessage) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: scalar fields overwrite only when non-default
    // in `other`; the `time` message field is merged recursively.
    public Builder mergeFrom(com.google.dataflow.v1beta3.JobMessage other) {
      if (other == com.google.dataflow.v1beta3.JobMessage.getDefaultInstance()) return this;
      if (!other.getId().isEmpty()) {
        id_ = other.id_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasTime()) {
        mergeTime(other.getTime());
      }
      if (!other.getMessageText().isEmpty()) {
        messageText_ = other.messageText_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (other.messageImportance_ != 0) {
        setMessageImportanceValue(other.getMessageImportanceValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields, so a JobMessage is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses fields from the wire. Tag values are (field_number << 3) | wire_type:
    // 10 = id (field 1, length-delimited), 18 = time (field 2, message),
    // 26 = message_text (field 3), 32 = message_importance (field 4, varint enum).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                id_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                messageText_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 32:
              {
                messageImportance_ = input.readEnum();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Tracks which fields have been explicitly set on this builder.
    private int bitField0_;
    // Stored as String once decoded, ByteString before first String access.
    private java.lang.Object id_ = "";
    /**
     *
     *
     * <pre>
     * Deprecated.
     * </pre>
     *
     * <code>string id = 1;</code>
     *
     * @return The id.
     */
    public java.lang.String getId() {
      java.lang.Object ref = id_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later reads skip the UTF-8 conversion.
        id_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Deprecated.
     * </pre>
     *
     * <code>string id = 1;</code>
     *
     * @return The bytes for id.
     */
    public com.google.protobuf.ByteString getIdBytes() {
      java.lang.Object ref = id_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        id_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Deprecated.
     * </pre>
     *
     * <code>string id = 1;</code>
     *
     * @param value The id to set.
     * @return This builder for chaining.
     */
    public Builder setId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      id_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated.
     * </pre>
     *
     * <code>string id = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearId() {
      id_ = getDefaultInstance().getId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Deprecated.
     * </pre>
     *
     * <code>string id = 1;</code>
     *
     * @param value The bytes for id to set.
     * @return This builder for chaining.
     */
    public Builder setIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      id_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // The raw value is used until a field builder is created; once
    // timeBuilder_ exists it owns the value and time_ is set to null.
    private com.google.protobuf.Timestamp time_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        timeBuilder_;
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     *
     * @return Whether the time field is set.
     */
    public boolean hasTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     *
     * @return The time.
     */
    public com.google.protobuf.Timestamp getTime() {
      if (timeBuilder_ == null) {
        return time_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : time_;
      } else {
        return timeBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     */
    public Builder setTime(com.google.protobuf.Timestamp value) {
      if (timeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        time_ = value;
      } else {
        timeBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     */
    public Builder setTime(com.google.protobuf.Timestamp.Builder builderForValue) {
      if (timeBuilder_ == null) {
        time_ = builderForValue.build();
      } else {
        timeBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     */
    public Builder mergeTime(com.google.protobuf.Timestamp value) {
      if (timeBuilder_ == null) {
        // Merge into the existing value only if one was already set and is
        // not the shared default instance; otherwise just adopt `value`.
        if (((bitField0_ & 0x00000002) != 0)
            && time_ != null
            && time_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getTimeBuilder().mergeFrom(value);
        } else {
          time_ = value;
        }
      } else {
        timeBuilder_.mergeFrom(value);
      }
      if (time_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     */
    public Builder clearTime() {
      bitField0_ = (bitField0_ & ~0x00000002);
      time_ = null;
      if (timeBuilder_ != null) {
        timeBuilder_.dispose();
        timeBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     */
    public com.google.protobuf.Timestamp.Builder getTimeBuilder() {
      // Handing out the nested builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getTimeFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     */
    public com.google.protobuf.TimestampOrBuilder getTimeOrBuilder() {
      if (timeBuilder_ != null) {
        return timeBuilder_.getMessageOrBuilder();
      } else {
        return time_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : time_;
      }
    }
    /**
     *
     *
     * <pre>
     * The timestamp of the message.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp time = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getTimeFieldBuilder() {
      if (timeBuilder_ == null) {
        timeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getTime(), getParentForChildren(), isClean());
        // Ownership transfers to the builder; drop the raw reference.
        time_ = null;
      }
      return timeBuilder_;
    }
    // Stored as String once decoded, ByteString before first String access.
    private java.lang.Object messageText_ = "";
    /**
     *
     *
     * <pre>
     * The text of the message.
     * </pre>
     *
     * <code>string message_text = 3;</code>
     *
     * @return The messageText.
     */
    public java.lang.String getMessageText() {
      java.lang.Object ref = messageText_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later reads skip the UTF-8 conversion.
        messageText_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The text of the message.
     * </pre>
     *
     * <code>string message_text = 3;</code>
     *
     * @return The bytes for messageText.
     */
    public com.google.protobuf.ByteString getMessageTextBytes() {
      java.lang.Object ref = messageText_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        messageText_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The text of the message.
     * </pre>
     *
     * <code>string message_text = 3;</code>
     *
     * @param value The messageText to set.
     * @return This builder for chaining.
     */
    public Builder setMessageText(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      messageText_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The text of the message.
     * </pre>
     *
     * <code>string message_text = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMessageText() {
      messageText_ = getDefaultInstance().getMessageText();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The text of the message.
     * </pre>
     *
     * <code>string message_text = 3;</code>
     *
     * @param value The bytes for messageText to set.
     * @return This builder for chaining.
     */
    public Builder setMessageTextBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      messageText_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Enum stored as its raw wire number so unrecognized values survive
    // a parse/serialize round trip.
    private int messageImportance_ = 0;
    /**
     *
     *
     * <pre>
     * Importance level of the message.
     * </pre>
     *
     * <code>.google.dataflow.v1beta3.JobMessageImportance message_importance = 4;</code>
     *
     * @return The enum numeric value on the wire for messageImportance.
     */
    @java.lang.Override
    public int getMessageImportanceValue() {
      return messageImportance_;
    }
    /**
     *
     *
     * <pre>
     * Importance level of the message.
     * </pre>
     *
     * <code>.google.dataflow.v1beta3.JobMessageImportance message_importance = 4;</code>
     *
     * @param value The enum numeric value on the wire for messageImportance to set.
     * @return This builder for chaining.
     */
    public Builder setMessageImportanceValue(int value) {
      messageImportance_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Importance level of the message.
     * </pre>
     *
     * <code>.google.dataflow.v1beta3.JobMessageImportance message_importance = 4;</code>
     *
     * @return The messageImportance.
     */
    @java.lang.Override
    public com.google.dataflow.v1beta3.JobMessageImportance getMessageImportance() {
      com.google.dataflow.v1beta3.JobMessageImportance result =
          com.google.dataflow.v1beta3.JobMessageImportance.forNumber(messageImportance_);
      // forNumber returns null for numbers not in the enum descriptor;
      // surface those as UNRECOGNIZED rather than null.
      return result == null
          ? com.google.dataflow.v1beta3.JobMessageImportance.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Importance level of the message.
     * </pre>
     *
     * <code>.google.dataflow.v1beta3.JobMessageImportance message_importance = 4;</code>
     *
     * @param value The messageImportance to set.
     * @return This builder for chaining.
     */
    public Builder setMessageImportance(com.google.dataflow.v1beta3.JobMessageImportance value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      messageImportance_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Importance level of the message.
     * </pre>
     *
     * <code>.google.dataflow.v1beta3.JobMessageImportance message_importance = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMessageImportance() {
      bitField0_ = (bitField0_ & ~0x00000008);
      messageImportance_ = 0;
      onChanged();
      return this;
    }
    // Unknown-field handling delegates to the base class; overridden only to
    // narrow the return type for chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.JobMessage)
  }
  // @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.JobMessage)
  // Shared default instance: returned by getDefaultInstance() and used as the
  // identity value in merge operations.
  private static final com.google.dataflow.v1beta3.JobMessage DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.JobMessage();
  }
  public static com.google.dataflow.v1beta3.JobMessage getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implemented in terms of Builder.mergeFrom; on failure the partial
  // message is attached to the exception for callers that want it.
  private static final com.google.protobuf.Parser<JobMessage> PARSER =
      new com.google.protobuf.AbstractParser<JobMessage>() {
        @java.lang.Override
        public JobMessage parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<JobMessage> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<JobMessage> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.dataflow.v1beta3.JobMessage getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== end of generated file: com.google.dataflow.v1beta3.JobMessage ====
// ==== next generated file: googleapis/google-cloud-java —
// java-biglake/proto-google-cloud-biglake-v1/src/main/java/com/google/cloud/bigquery/biglake/v1/ListTablesResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1;
/**
*
*
* <pre>
* Response message for the ListTables method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.ListTablesResponse}
*/
public final class ListTablesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1.ListTablesResponse)
ListTablesResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListTablesResponse.newBuilder() to construct.
  private ListTablesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only to create the shared default instance.
  private ListTablesResponse() {
    tables_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListTablesResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.biglake.v1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1_ListTablesResponse_descriptor;
  }
  // Wires the reflective field accessors for this message type.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.biglake.v1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1_ListTablesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.biglake.v1.ListTablesResponse.class,
            com.google.cloud.bigquery.biglake.v1.ListTablesResponse.Builder.class);
  }
  public static final int TABLES_FIELD_NUMBER = 1;
  // Immutable once the message is built; the accessors below are plain views.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.bigquery.biglake.v1.Table> tables_;
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.bigquery.biglake.v1.Table> getTablesList() {
    return tables_;
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.bigquery.biglake.v1.TableOrBuilder>
      getTablesOrBuilderList() {
    return tables_;
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
   */
  @java.lang.Override
  public int getTablesCount() {
    return tables_.size();
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.Table getTables(int index) {
    return tables_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.TableOrBuilder getTablesOrBuilder(int index) {
    return tables_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as String once decoded, ByteString before first String access;
  // volatile so the lazily-cached decode is safe across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later reads skip the UTF-8 conversion.
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Writes tables as field 1 and next_page_token as field 2 (only when
  // non-empty, per proto3 default-suppression), then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < tables_.size(); i++) {
      output.writeMessage(1, tables_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Mirrors writeTo; the computed size is memoized in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < tables_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tables_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.biglake.v1.ListTablesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.biglake.v1.ListTablesResponse other =
        (com.google.cloud.bigquery.biglake.v1.ListTablesResponse) obj;
    if (!getTablesList().equals(other.getTablesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes the descriptor, each set field keyed by its field number,
  // and the unknown fields; result is memoized (consistent with equals).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getTablesCount() > 0) {
      hash = (37 * hash) + TABLES_FIELD_NUMBER;
      hash = (53 * hash) + getTablesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points over the common input types
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
  // with and without an ExtensionRegistryLite.
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message.
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with `prototype`'s fields.
  public static Builder newBuilder(
      com.google.cloud.bigquery.biglake.v1.ListTablesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // The default instance yields an empty builder; any other instance is
  // merged into a fresh builder.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for the ListTables method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.ListTablesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1.ListTablesResponse)
com.google.cloud.bigquery.biglake.v1.ListTablesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.biglake.v1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1_ListTablesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.biglake.v1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1_ListTablesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.biglake.v1.ListTablesResponse.class,
              com.google.cloud.bigquery.biglake.v1.ListTablesResponse.Builder.class);
    }
    // Construct using com.google.cloud.bigquery.biglake.v1.ListTablesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both fields and clears the has-bits (0x1=tables, 0x2=nextPageToken).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (tablesBuilder_ == null) {
        tables_ = java.util.Collections.emptyList();
      } else {
        tables_ = null;
        tablesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.biglake.v1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1_ListTablesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1.ListTablesResponse getDefaultInstanceForType() {
      return com.google.cloud.bigquery.biglake.v1.ListTablesResponse.getDefaultInstance();
    }
    // Builds the message; throws if uninitialized (cannot happen here since
    // isInitialized() always returns true for this message).
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1.ListTablesResponse build() {
      com.google.cloud.bigquery.biglake.v1.ListTablesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1.ListTablesResponse buildPartial() {
      com.google.cloud.bigquery.biglake.v1.ListTablesResponse result =
          new com.google.cloud.bigquery.biglake.v1.ListTablesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes the repeated `tables` field: the builder's mutable list is made
    // unmodifiable (and the has-bit dropped) so the built message can share it.
    private void buildPartialRepeatedFields(
        com.google.cloud.bigquery.biglake.v1.ListTablesResponse result) {
      if (tablesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          tables_ = java.util.Collections.unmodifiableList(tables_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tables_ = tables_;
      } else {
        result.tables_ = tablesBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.bigquery.biglake.v1.ListTablesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Reflection-based field mutators: delegate to the base class; overridden
    // only to narrow the return type to this Builder for call chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: routes to the type-specific merge when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.biglake.v1.ListTablesResponse) {
        return mergeFrom((com.google.cloud.bigquery.biglake.v1.ListTablesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Appends `other`'s tables (adopting its immutable list wholesale when
    // ours is empty) and overwrites next_page_token when non-empty in `other`.
    public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1.ListTablesResponse other) {
      if (other == com.google.cloud.bigquery.biglake.v1.ListTablesResponse.getDefaultInstance())
        return this;
      if (tablesBuilder_ == null) {
        if (!other.tables_.isEmpty()) {
          if (tables_.isEmpty()) {
            tables_ = other.tables_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTablesIsMutable();
            tables_.addAll(other.tables_);
          }
          onChanged();
        }
      } else {
        if (!other.tables_.isEmpty()) {
          if (tablesBuilder_.isEmpty()) {
            // Drop the empty field builder and adopt the list directly,
            // recreating the builder only if the runtime requires one.
            tablesBuilder_.dispose();
            tablesBuilder_ = null;
            tables_ = other.tables_;
            bitField0_ = (bitField0_ & ~0x00000001);
            tablesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getTablesFieldBuilder()
                    : null;
          } else {
            tablesBuilder_.addAllMessages(other.tables_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields, so the message is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses fields from the wire. Tag values are (field_number << 3) | wire_type:
    // 10 = tables (field 1, length-delimited message),
    // 18 = next_page_token (field 2, length-delimited string).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.bigquery.biglake.v1.Table m =
                    input.readMessage(
                        com.google.cloud.bigquery.biglake.v1.Table.parser(), extensionRegistry);
                if (tablesBuilder_ == null) {
                  ensureTablesIsMutable();
                  tables_.add(m);
                } else {
                  tablesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Tracks which fields have been explicitly set on this builder.
    private int bitField0_;
    private java.util.List<com.google.cloud.bigquery.biglake.v1.Table> tables_ =
        java.util.Collections.emptyList();
    // Copy-on-write: bit 0x1 records whether tables_ is our own mutable copy;
    // when unset, tables_ may be a shared immutable list that must be copied
    // before mutation.
    private void ensureTablesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        tables_ = new java.util.ArrayList<com.google.cloud.bigquery.biglake.v1.Table>(tables_);
        bitField0_ |= 0x00000001;
      }
    }
    // Non-null once any builder-view accessor is used; then it owns the field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1.Table,
            com.google.cloud.bigquery.biglake.v1.Table.Builder,
            com.google.cloud.bigquery.biglake.v1.TableOrBuilder>
        tablesBuilder_;
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @return an unmodifiable view of the current tables.
     */
    public java.util.List<com.google.cloud.bigquery.biglake.v1.Table> getTablesList() {
      if (tablesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(tables_);
      } else {
        return tablesBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @return the number of tables currently set.
     */
    public int getTablesCount() {
      if (tablesBuilder_ == null) {
        return tables_.size();
      } else {
        return tablesBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position in the repeated field.
     * @return the table at {@code index}.
     */
    public com.google.cloud.bigquery.biglake.v1.Table getTables(int index) {
      if (tablesBuilder_ == null) {
        return tables_.get(index);
      } else {
        return tablesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position to overwrite.
     * @param value the replacement table; must not be null.
     * @return this builder for chaining.
     */
    public Builder setTables(int index, com.google.cloud.bigquery.biglake.v1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.set(index, value);
        onChanged();
      } else {
        tablesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position to overwrite.
     * @param builderForValue builder whose built message replaces the element.
     * @return this builder for chaining.
     */
    public Builder setTables(
        int index, com.google.cloud.bigquery.biglake.v1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.set(index, builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param value the table to append; must not be null.
     * @return this builder for chaining.
     */
    public Builder addTables(com.google.cloud.bigquery.biglake.v1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.add(value);
        onChanged();
      } else {
        tablesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position at which to insert.
     * @param value the table to insert; must not be null.
     * @return this builder for chaining.
     */
    public Builder addTables(int index, com.google.cloud.bigquery.biglake.v1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.add(index, value);
        onChanged();
      } else {
        tablesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param builderForValue builder whose built message is appended.
     * @return this builder for chaining.
     */
    public Builder addTables(com.google.cloud.bigquery.biglake.v1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.add(builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position at which to insert.
     * @param builderForValue builder whose built message is inserted.
     * @return this builder for chaining.
     */
    public Builder addTables(
        int index, com.google.cloud.bigquery.biglake.v1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.add(index, builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param values tables to append in iteration order; none may be null.
     * @return this builder for chaining.
     */
    public Builder addAllTables(
        java.lang.Iterable<? extends com.google.cloud.bigquery.biglake.v1.Table> values) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tables_);
        onChanged();
      } else {
        tablesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @return this builder for chaining.
     */
    public Builder clearTables() {
      if (tablesBuilder_ == null) {
        // Reset to the shared immutable empty list and clear the "mutable" bit.
        tables_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        tablesBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position of the element to remove.
     * @return this builder for chaining.
     */
    public Builder removeTables(int index) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.remove(index);
        onChanged();
      } else {
        tablesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position of the element.
     * @return a mutable builder view of the table at {@code index}.
     */
    public com.google.cloud.bigquery.biglake.v1.Table.Builder getTablesBuilder(int index) {
      // Forces the transition from plain list to field-builder representation.
      return getTablesFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position of the element.
     * @return a read-only view of the table at {@code index}.
     */
    public com.google.cloud.bigquery.biglake.v1.TableOrBuilder getTablesOrBuilder(int index) {
      if (tablesBuilder_ == null) {
        return tables_.get(index);
      } else {
        return tablesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @return an unmodifiable list of read-only views of the tables.
     */
    public java.util.List<? extends com.google.cloud.bigquery.biglake.v1.TableOrBuilder>
        getTablesOrBuilderList() {
      if (tablesBuilder_ != null) {
        return tablesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(tables_);
      }
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @return a builder for a new table appended to the end of the field.
     */
    public com.google.cloud.bigquery.biglake.v1.Table.Builder addTablesBuilder() {
      return getTablesFieldBuilder()
          .addBuilder(com.google.cloud.bigquery.biglake.v1.Table.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @param index zero-based position at which the new table is inserted.
     * @return a builder for the newly inserted table.
     */
    public com.google.cloud.bigquery.biglake.v1.Table.Builder addTablesBuilder(int index) {
      return getTablesFieldBuilder()
          .addBuilder(index, com.google.cloud.bigquery.biglake.v1.Table.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1.Table tables = 1;</code>
     *
     * @return mutable builder views of all current tables.
     */
    public java.util.List<com.google.cloud.bigquery.biglake.v1.Table.Builder>
        getTablesBuilderList() {
      return getTablesFieldBuilder().getBuilderList();
    }

    // Lazily creates the RepeatedFieldBuilderV3 and hands it ownership of
    // tables_ (which is then nulled out so only one representation is live).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1.Table,
            com.google.cloud.bigquery.biglake.v1.Table.Builder,
            com.google.cloud.bigquery.biglake.v1.TableOrBuilder>
        getTablesFieldBuilder() {
      if (tablesBuilder_ == null) {
        tablesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.bigquery.biglake.v1.Table,
                com.google.cloud.bigquery.biglake.v1.Table.Builder,
                com.google.cloud.bigquery.biglake.v1.TableOrBuilder>(
                tables_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        tables_ = null;
      }
      return tablesBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString in place of the String.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Restore the default ("") and drop the has-bit for field 2.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final pass-throughs to the superclass so subclasses of the generated
    // builder cannot alter unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1.ListTablesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1.ListTablesResponse)
  // Shared immutable default instance; all unset message fields point here.
  private static final com.google.cloud.bigquery.biglake.v1.ListTablesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1.ListTablesResponse();
  }

  public static com.google.cloud.bigquery.biglake.v1.ListTablesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that routes all wire parsing through the builder's mergeFrom and
  // attaches the partially built message to any parse exception.
  private static final com.google.protobuf.Parser<ListTablesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListTablesResponse>() {
        @java.lang.Override
        public ListTablesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListTablesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListTablesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.ListTablesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/java-photoslibrary | 36,105 | photoslibraryapi/src/main/java/com/google/photos/library/v1/proto/ListSharedAlbumsResponse.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/photos/library/v1/photos_library.proto
package com.google.photos.library.v1.proto;
/**
*
*
* <pre>
* List of shared albums requested.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.ListSharedAlbumsResponse}
*/
public final class ListSharedAlbumsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.photos.library.v1.ListSharedAlbumsResponse)
ListSharedAlbumsResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListSharedAlbumsResponse.newBuilder() to construct.
  private ListSharedAlbumsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; sets field defaults.
  private ListSharedAlbumsResponse() {
    sharedAlbums_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Hook used by the runtime to create fresh instances without reflection.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListSharedAlbumsResponse();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Descriptor and reflective field-accessor plumbing generated from
  // google/photos/library/v1/photos_library.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.photos.library.v1.proto.LibraryServiceProto
        .internal_static_google_photos_library_v1_ListSharedAlbumsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.photos.library.v1.proto.LibraryServiceProto
        .internal_static_google_photos_library_v1_ListSharedAlbumsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.photos.library.v1.proto.ListSharedAlbumsResponse.class,
            com.google.photos.library.v1.proto.ListSharedAlbumsResponse.Builder.class);
  }
  public static final int SHARED_ALBUMS_FIELD_NUMBER = 1;

  // Immutable on a built message; returned directly by the accessors below.
  @SuppressWarnings("serial")
  private java.util.List<com.google.photos.types.proto.Album> sharedAlbums_;

  /**
   *
   *
   * <pre>
   * Output only. List of shared albums.
   * </pre>
   *
   * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
   *
   * @return the list of shared albums.
   */
  @java.lang.Override
  public java.util.List<com.google.photos.types.proto.Album> getSharedAlbumsList() {
    return sharedAlbums_;
  }

  /**
   *
   *
   * <pre>
   * Output only. List of shared albums.
   * </pre>
   *
   * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
   *
   * @return read-only views of the shared albums.
   */
  @java.lang.Override
  public java.util.List<? extends com.google.photos.types.proto.AlbumOrBuilder>
      getSharedAlbumsOrBuilderList() {
    return sharedAlbums_;
  }

  /**
   *
   *
   * <pre>
   * Output only. List of shared albums.
   * </pre>
   *
   * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
   *
   * @return the number of shared albums.
   */
  @java.lang.Override
  public int getSharedAlbumsCount() {
    return sharedAlbums_.size();
  }

  /**
   *
   *
   * <pre>
   * Output only. List of shared albums.
   * </pre>
   *
   * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
   *
   * @param index zero-based position in the repeated field.
   * @return the album at {@code index}.
   */
  @java.lang.Override
  public com.google.photos.types.proto.Album getSharedAlbums(int index) {
    return sharedAlbums_.get(index);
  }

  /**
   *
   *
   * <pre>
   * Output only. List of shared albums.
   * </pre>
   *
   * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
   *
   * @param index zero-based position in the repeated field.
   * @return a read-only view of the album at {@code index}.
   */
  @java.lang.Override
  public com.google.photos.types.proto.AlbumOrBuilder getSharedAlbumsOrBuilder(int index) {
    return sharedAlbums_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or its ByteString wire form; converted lazily and
  // cached in whichever direction is requested first.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * Output only. Token to use to get the next set of shared albums. Populated
   * if there are more shared albums to retrieve for this request.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Output only. Token to use to get the next set of shared albums. Populated
   * if there are more shared albums to retrieve for this request.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields, so the message is always initialized; cache that.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes field 1 (each album as a length-delimited message), field 2
  // (the token, only when non-empty per proto3 rules), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < sharedAlbums_.size(); i++) {
      output.writeMessage(1, sharedAlbums_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size, mirroring writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < sharedAlbums_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sharedAlbums_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.photos.library.v1.proto.ListSharedAlbumsResponse)) {
      return super.equals(obj);
    }
    com.google.photos.library.v1.proto.ListSharedAlbumsResponse other =
        (com.google.photos.library.v1.proto.ListSharedAlbumsResponse) obj;

    if (!getSharedAlbumsList().equals(other.getSharedAlbumsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals; mixes descriptor, set fields, and
  // unknown fields using the standard protobuf-generated multipliers.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSharedAlbumsCount() > 0) {
      hash = (37 * hash) + SHARED_ALBUMS_FIELD_NUMBER;
      hash = (53 * hash) + getSharedAlbumsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to
  // PARSER (optionally with an extension registry) for each input kind.
  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length before the message bytes.
  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with the given message's fields.
  public static Builder newBuilder(
      com.google.photos.library.v1.proto.ListSharedAlbumsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; anything else is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* List of shared albums requested.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.ListSharedAlbumsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.photos.library.v1.ListSharedAlbumsResponse)
com.google.photos.library.v1.proto.ListSharedAlbumsResponseOrBuilder {
    // Builder-side descriptor and reflective accessor plumbing (mirrors the
    // message-side definitions).
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.photos.library.v1.proto.LibraryServiceProto
          .internal_static_google_photos_library_v1_ListSharedAlbumsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.photos.library.v1.proto.LibraryServiceProto
          .internal_static_google_photos_library_v1_ListSharedAlbumsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.photos.library.v1.proto.ListSharedAlbumsResponse.class,
              com.google.photos.library.v1.proto.ListSharedAlbumsResponse.Builder.class);
    }

    // Construct using com.google.photos.library.v1.proto.ListSharedAlbumsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and clears all has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (sharedAlbumsBuilder_ == null) {
        sharedAlbums_ = java.util.Collections.emptyList();
      } else {
        sharedAlbums_ = null;
        sharedAlbumsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.photos.library.v1.proto.LibraryServiceProto
          .internal_static_google_photos_library_v1_ListSharedAlbumsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.photos.library.v1.proto.ListSharedAlbumsResponse getDefaultInstanceForType() {
      return com.google.photos.library.v1.proto.ListSharedAlbumsResponse.getDefaultInstance();
    }
    // Builds and verifies initialization (always true here — no required fields).
    @java.lang.Override
    public com.google.photos.library.v1.proto.ListSharedAlbumsResponse build() {
      com.google.photos.library.v1.proto.ListSharedAlbumsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.photos.library.v1.proto.ListSharedAlbumsResponse buildPartial() {
      com.google.photos.library.v1.proto.ListSharedAlbumsResponse result =
          new com.google.photos.library.v1.proto.ListSharedAlbumsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field: freezes the plain list (clearing the
    // mutable bit so the builder won't mutate it later) or builds from the
    // field builder.
    private void buildPartialRepeatedFields(
        com.google.photos.library.v1.proto.ListSharedAlbumsResponse result) {
      if (sharedAlbumsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          sharedAlbums_ = java.util.Collections.unmodifiableList(sharedAlbums_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.sharedAlbums_ = sharedAlbums_;
      } else {
        result.sharedAlbums_ = sharedAlbumsBuilder_.build();
      }
    }

    // Copies singular fields whose has-bits are set (bit 0x00000002 = token).
    private void buildPartial0(com.google.photos.library.v1.proto.ListSharedAlbumsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Reflective field operations are delegated unchanged to the superclass;
    // the overrides exist only to narrow the return type to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the type-specific overload when possible; otherwise the
    // superclass performs a reflective field-by-field merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.photos.library.v1.proto.ListSharedAlbumsResponse) {
        return mergeFrom((com.google.photos.library.v1.proto.ListSharedAlbumsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges another message's set fields into this builder. The repeated
    // field either adopts other's (immutable) list wholesale when this one is
    // empty, or appends element-by-element; with an active field builder the
    // same choice is made on the builder side.
    public Builder mergeFrom(com.google.photos.library.v1.proto.ListSharedAlbumsResponse other) {
      if (other == com.google.photos.library.v1.proto.ListSharedAlbumsResponse.getDefaultInstance())
        return this;
      if (sharedAlbumsBuilder_ == null) {
        if (!other.sharedAlbums_.isEmpty()) {
          if (sharedAlbums_.isEmpty()) {
            // Adopt other's list; clear the mutable bit since it is shared.
            sharedAlbums_ = other.sharedAlbums_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSharedAlbumsIsMutable();
            sharedAlbums_.addAll(other.sharedAlbums_);
          }
          onChanged();
        }
      } else {
        if (!other.sharedAlbums_.isEmpty()) {
          if (sharedAlbumsBuilder_.isEmpty()) {
            // Replace the empty field builder with other's list; it is only
            // re-created eagerly when alwaysUseFieldBuilders is set.
            sharedAlbumsBuilder_.dispose();
            sharedAlbumsBuilder_ = null;
            sharedAlbums_ = other.sharedAlbums_;
            bitField0_ = (bitField0_ & ~0x00000001);
            sharedAlbumsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSharedAlbumsFieldBuilder()
                    : null;
          } else {
            sharedAlbumsBuilder_.addAllMessages(other.sharedAlbums_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // This message declares no required fields, so every state is initialized.
      return true;
    }

    // Parses wire-format bytes into this builder. Field 1 (tag 10) appends an
    // Album; field 2 (tag 18) replaces the page token; unknown fields kept.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                com.google.photos.types.proto.Album m =
                    input.readMessage(
                        com.google.photos.types.proto.Album.parser(), extensionRegistry);
                if (sharedAlbumsBuilder_ == null) {
                  ensureSharedAlbumsIsMutable();
                  sharedAlbums_.add(m);
                } else {
                  sharedAlbumsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on partial parse so cached state is invalidated.
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    // Repeated field `shared_albums = 1`. Backed either by sharedAlbums_
    // (plain list) or, once builders are requested, by sharedAlbumsBuilder_.
    private java.util.List<com.google.photos.types.proto.Album> sharedAlbums_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replaces sharedAlbums_ with a privately owned ArrayList
    // on first mutation; bit 0x00000001 records that the list is mutable.
    private void ensureSharedAlbumsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        sharedAlbums_ = new java.util.ArrayList<com.google.photos.types.proto.Album>(sharedAlbums_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.photos.types.proto.Album,
            com.google.photos.types.proto.Album.Builder,
            com.google.photos.types.proto.AlbumOrBuilder>
        sharedAlbumsBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. List of shared albums.
     * </pre>
     *
     * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
     *
     * @return an unmodifiable view of the current shared albums.
     */
    public java.util.List<com.google.photos.types.proto.Album> getSharedAlbumsList() {
      if (sharedAlbumsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(sharedAlbums_);
      } else {
        return sharedAlbumsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. List of shared albums.
     * </pre>
     *
     * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
     *
     * @return the number of shared albums currently set.
     */
    public int getSharedAlbumsCount() {
      if (sharedAlbumsBuilder_ == null) {
        return sharedAlbums_.size();
      } else {
        return sharedAlbumsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. List of shared albums.
     * </pre>
     *
     * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
     *
     * @param index zero-based position in the repeated field.
     * @return the album at {@code index}.
     */
    public com.google.photos.types.proto.Album getSharedAlbums(int index) {
      if (sharedAlbumsBuilder_ == null) {
        return sharedAlbums_.get(index);
      } else {
        return sharedAlbumsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. List of shared albums.
     * </pre>
     *
     * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
     *
     * @param index zero-based position to overwrite.
     * @param value the replacement album; must not be null.
     * @return this builder for chaining.
     */
    public Builder setSharedAlbums(int index, com.google.photos.types.proto.Album value) {
      if (sharedAlbumsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSharedAlbumsIsMutable();
        sharedAlbums_.set(index, value);
        onChanged();
      } else {
        sharedAlbumsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. List of shared albums.
     * </pre>
     *
     * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
     *
     * @param index zero-based position to overwrite.
     * @param builderForValue builder whose built message replaces the element.
     * @return this builder for chaining.
     */
    public Builder setSharedAlbums(
        int index, com.google.photos.types.proto.Album.Builder builderForValue) {
      if (sharedAlbumsBuilder_ == null) {
        ensureSharedAlbumsIsMutable();
        sharedAlbums_.set(index, builderForValue.build());
        onChanged();
      } else {
        sharedAlbumsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. List of shared albums.
     * </pre>
     *
     * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
     *
     * @param value the album to append; must not be null.
     * @return this builder for chaining.
     */
    public Builder addSharedAlbums(com.google.photos.types.proto.Album value) {
      if (sharedAlbumsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSharedAlbumsIsMutable();
        sharedAlbums_.add(value);
        onChanged();
      } else {
        sharedAlbumsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. List of shared albums.
     * </pre>
     *
     * <code>repeated .google.photos.types.Album shared_albums = 1;</code>
     *
     * @param index zero-based position at which to insert.
     * @param value the album to insert; must not be null.
     * @return this builder for chaining.
     */
    public Builder addSharedAlbums(int index, com.google.photos.types.proto.Album value) {
      if (sharedAlbumsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSharedAlbumsIsMutable();
        sharedAlbums_.add(index, value);
        onChanged();
      } else {
        sharedAlbumsBuilder_.addMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public Builder addSharedAlbums(com.google.photos.types.proto.Album.Builder builderForValue) {
if (sharedAlbumsBuilder_ == null) {
ensureSharedAlbumsIsMutable();
sharedAlbums_.add(builderForValue.build());
onChanged();
} else {
sharedAlbumsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public Builder addSharedAlbums(
int index, com.google.photos.types.proto.Album.Builder builderForValue) {
if (sharedAlbumsBuilder_ == null) {
ensureSharedAlbumsIsMutable();
sharedAlbums_.add(index, builderForValue.build());
onChanged();
} else {
sharedAlbumsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public Builder addAllSharedAlbums(
java.lang.Iterable<? extends com.google.photos.types.proto.Album> values) {
if (sharedAlbumsBuilder_ == null) {
ensureSharedAlbumsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sharedAlbums_);
onChanged();
} else {
sharedAlbumsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public Builder clearSharedAlbums() {
if (sharedAlbumsBuilder_ == null) {
sharedAlbums_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
sharedAlbumsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public Builder removeSharedAlbums(int index) {
if (sharedAlbumsBuilder_ == null) {
ensureSharedAlbumsIsMutable();
sharedAlbums_.remove(index);
onChanged();
} else {
sharedAlbumsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public com.google.photos.types.proto.Album.Builder getSharedAlbumsBuilder(int index) {
return getSharedAlbumsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public com.google.photos.types.proto.AlbumOrBuilder getSharedAlbumsOrBuilder(int index) {
if (sharedAlbumsBuilder_ == null) {
return sharedAlbums_.get(index);
} else {
return sharedAlbumsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public java.util.List<? extends com.google.photos.types.proto.AlbumOrBuilder>
getSharedAlbumsOrBuilderList() {
if (sharedAlbumsBuilder_ != null) {
return sharedAlbumsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sharedAlbums_);
}
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public com.google.photos.types.proto.Album.Builder addSharedAlbumsBuilder() {
return getSharedAlbumsFieldBuilder()
.addBuilder(com.google.photos.types.proto.Album.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public com.google.photos.types.proto.Album.Builder addSharedAlbumsBuilder(int index) {
return getSharedAlbumsFieldBuilder()
.addBuilder(index, com.google.photos.types.proto.Album.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. List of shared albums.
* </pre>
*
* <code>repeated .google.photos.types.Album shared_albums = 1;</code>
*/
public java.util.List<com.google.photos.types.proto.Album.Builder>
getSharedAlbumsBuilderList() {
return getSharedAlbumsFieldBuilder().getBuilderList();
}
    // Lazily creates the RepeatedFieldBuilderV3 on first use, seeding it from
    // sharedAlbums_; afterwards the plain list is nulled out and every access
    // must go through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.photos.types.proto.Album,
            com.google.photos.types.proto.Album.Builder,
            com.google.photos.types.proto.AlbumOrBuilder>
        getSharedAlbumsFieldBuilder() {
      if (sharedAlbumsBuilder_ == null) {
        sharedAlbumsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.photos.types.proto.Album,
                com.google.photos.types.proto.Album.Builder,
                com.google.photos.types.proto.AlbumOrBuilder>(
                sharedAlbums_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        // Hand ownership of the list to the builder; bit 0x1 told it whether the
        // list was already a private mutable copy.
        sharedAlbums_ = null;
      }
      return sharedAlbumsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Output only. Token to use to get the next set of shared albums. Populated
     * if there are more shared albums to retrieve for this request.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String back into the field so later reads skip the
        // UTF-8 decode.
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Token to use to get the next set of shared albums. Populated
     * if there are more shared albums to retrieve for this request.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString back into the field so later reads skip
        // the UTF-8 encode.
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Output only. Token to use to get the next set of shared albums. Populated
* if there are more shared albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Token to use to get the next set of shared albums. Populated
* if there are more shared albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Token to use to get the next set of shared albums. Populated
* if there are more shared albums to retrieve for this request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Delegates unknown-field replacement to GeneratedMessageV3.Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Delegates unknown-field merging to GeneratedMessageV3.Builder.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.photos.library.v1.ListSharedAlbumsResponse)
}
  // @@protoc_insertion_point(class_scope:google.photos.library.v1.ListSharedAlbumsResponse)
  // Singleton default (all-fields-empty) instance, created eagerly at class load.
  private static final com.google.photos.library.v1.proto.ListSharedAlbumsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.photos.library.v1.proto.ListSharedAlbumsResponse();
  }
  // Returns the shared immutable default instance of this message.
  public static com.google.photos.library.v1.proto.ListSharedAlbumsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: builds via a Builder and, on any failure, attaches the
  // partially-parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListSharedAlbumsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListSharedAlbumsResponse>() {
        @java.lang.Override
        public ListSharedAlbumsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the singleton wire-format parser.
  public static com.google.protobuf.Parser<ListSharedAlbumsResponse> parser() {
    return PARSER;
  }
  // Instance accessor for the singleton wire-format parser.
  @java.lang.Override
  public com.google.protobuf.Parser<ListSharedAlbumsResponse> getParserForType() {
    return PARSER;
  }
  // Instance accessor for the shared immutable default instance.
  @java.lang.Override
  public com.google.photos.library.v1.proto.ListSharedAlbumsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/solr | 36,118 | solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.update;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.SocketException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.solr.BaseDistributedSearchTestCase;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener;
import org.apache.solr.embedded.JettySolrRunner;
import org.apache.solr.index.LogDocMergePolicyFactory;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.MockStreamingSolrClients.Exp;
import org.apache.solr.update.SolrCmdDistributor.ForwardNode;
import org.apache.solr.update.SolrCmdDistributor.Node;
import org.apache.solr.update.SolrCmdDistributor.SolrError;
import org.apache.solr.update.SolrCmdDistributor.StdNode;
import org.apache.solr.update.processor.DistributedUpdateProcessor;
import org.apache.solr.update.processor.DistributedUpdateProcessor.LeaderRequestReplicationTracker;
import org.apache.solr.update.processor.DistributedUpdateProcessor.RollupRequestReplicationTracker;
import org.apache.solr.util.TestInjection;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows
// machines occasionally
public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase {
private static enum NodeType {
FORWARD,
STANDARD
};
private final AtomicInteger uniqueId = new AtomicInteger();
  @BeforeClass
  public static void beforeClass() {
    // we can't use the Randomized merge policy because the test depends on
    // being able to call optimize to have all deletes expunged.
    systemSetPropertySolrTestsMergePolicyFactory(LogDocMergePolicyFactory.class.getName());
    // Shorten the streaming client's poll queue time so the many small update
    // batches in this test flush promptly.
    System.setProperty("solr.cloud.client.pollQueueTime", "2000");
  }
  @AfterClass
  public static void afterClass() {
    // Undo the system properties set in beforeClass so later test classes are
    // not affected.
    systemClearPropertySolrTestsMergePolicyFactory();
    System.clearProperty("solr.cloud.client.pollQueueTime");
  }
private UpdateShardHandler updateShardHandler;
  public SolrCmdDistributorTest() {
    updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT);
    // Disable the base class's stress iterations; this test drives its own load.
    stress = 0;
  }
  // Schema used by every core created for this test.
  public static String getSchemaFile() {
    return "schema.xml";
  }
  public static String getSolrConfigFile() {
    // use this because it has /update and is minimal
    return "solrconfig-tlog.xml";
  }
  // TODO: for now we redefine this method so that it pulls from the above
  // we don't get helpful override behavior due to the method being static
  /**
   * Starts one control Jetty plus {@code numShards} shard Jettys, each with its
   * own seeded Solr home, and records the shard URLs in {@code shardsArr}/{@code shards}.
   */
  @Override
  protected void createServers(int numShards) throws Exception {
    System.setProperty("configSetBaseDir", TEST_HOME().toString());
    // Control node: its index is the reference the shard results are compared to.
    Path controlHome = testDir.resolve("control");
    seedSolrHome(controlHome);
    writeCoreProperties(
        controlHome.resolve("cores").resolve(DEFAULT_TEST_CORENAME), DEFAULT_TEST_CORENAME);
    controlJetty =
        createJetty(
            controlHome, testDir + "/control/data", null, getSolrConfigFile(), getSchemaFile());
    controlJetty.start();
    controlClient = createNewSolrClient(controlJetty.getLocalPort());
    shardsArr = new String[numShards];
    StringBuilder sb = new StringBuilder();
    // One Jetty per shard, each with its own seeded home and core.properties.
    for (int i = 0; i < numShards; i++) {
      if (sb.length() > 0) sb.append(',');
      String shardname = "shard" + i;
      Path shardHome = testDir.resolve(shardname);
      seedSolrHome(shardHome);
      Path coresPath = shardHome.resolve("cores");
      writeCoreProperties(coresPath.resolve(DEFAULT_TEST_CORENAME), DEFAULT_TEST_CORENAME);
      JettySolrRunner j =
          createJetty(
              shardHome,
              testDir + "/shard" + i + "/data",
              null,
              getSolrConfigFile(),
              getSchemaFile());
      j.start();
      jettys.add(j);
      clients.add(createNewSolrClient(j.getLocalPort()));
      String shardStr = buildUrl(j.getLocalPort());
      shardsArr[i] = shardStr;
      sb.append(shardStr);
    }
    // Comma-separated list of all shard URLs, as the base class expects.
    shards = sb.toString();
  }
  /**
   * End-to-end exercise of {@link SolrCmdDistributor}: distributes adds, deletes
   * and commits to the control node and shard clients, verifies doc counts and
   * commit callbacks, then drives all retry/error sub-scenarios sequentially.
   */
  @SuppressWarnings("unchecked")
  @Test
  @ShardsFixed(num = 4)
  public void test() throws Exception {
    del("*:*");
    ModifiableSolrParams params = new ModifiableSolrParams();
    List<Node> nodes = new ArrayList<>();
    AddUpdateCommand cmd = new AddUpdateCommand(null);
    List<SolrError> errors;
    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
    long numFound;
    HttpSolrClient client;
    ZkNodeProps nodeProps;
    // Phase 1: a single add + commit routed only to the control node.
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
      nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              ((HttpSolrClient) controlClient).getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              controlClient.getDefaultCollection());
      nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));

      // add one doc to controlClient
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      params = new ModifiableSolrParams();
      cmdDistrib.distribAdd(cmd, nodes, params);

      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();

      errors = cmdDistrib.getErrors();

      assertEquals(errors.toString(), 0, errors.size());

      numFound = controlClient.query(new SolrQuery("*:*")).getResults().getNumFound();
      assertEquals(1, numFound);

      client = (HttpSolrClient) clients.get(0);
      nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              client.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              client.getDefaultCollection());
      nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
    }
    int id2;
    // Phase 2: fan adds out to both nodes (and one add to only the shard client).
    // add another 2 docs to control and 3 to client
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribAdd(cmd, nodes, params);

      id2 = uniqueId.incrementAndGet();
      AddUpdateCommand cmd2 = new AddUpdateCommand(null);
      cmd2.solrDoc = sdoc("id", id2);

      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribAdd(cmd2, nodes, params);

      AddUpdateCommand cmd3 = new AddUpdateCommand(null);
      cmd3.solrDoc = sdoc("id", uniqueId.incrementAndGet());

      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribAdd(cmd3, Collections.singletonList(nodes.get(1)), params);

      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();
      errors = cmdDistrib.getErrors();
    }

    assertEquals(errors.toString(), 0, errors.size());

    SolrDocumentList results = controlClient.query(new SolrQuery("*:*")).getResults();
    numFound = results.getNumFound();
    assertEquals(results.toString(), 3, numFound);

    numFound = client.query(new SolrQuery("*:*")).getResults().getNumFound();
    assertEquals(3, numFound);

    // Phase 3: distributed delete-by-id of a doc present on both nodes.
    // now delete doc 2 which is on both control and client1
    DeleteUpdateCommand dcmd = new DeleteUpdateCommand(null);
    dcmd.id = Integer.toString(id2);

    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribDelete(dcmd, nodes, params);

      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();

      errors = cmdDistrib.getErrors();
    }

    assertEquals(errors.toString(), 0, errors.size());

    results = controlClient.query(new SolrQuery("*:*")).getResults();
    numFound = results.getNumFound();
    assertEquals(results.toString(), 2, numFound);

    numFound = client.query(new SolrQuery("*:*")).getResults().getNumFound();
    assertEquals(results.toString(), 2, numFound);

    for (SolrClient c : clients) {
      c.optimize();
      // System.out.println(clients.get(0).request(new LukeRequest()));
    }

    // Phase 4: randomized bulk adds to random node subsets, then a commit to all
    // nodes; verify one commit callback per shard and no deletes in any index.
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
      int cnt = atLeast(303);
      for (int i = 0; i < cnt; i++) {
        nodes.clear();
        for (SolrClient c : clients) {
          if (random().nextBoolean()) {
            continue;
          }
          HttpSolrClient httpClient = (HttpSolrClient) c;
          nodeProps =
              new ZkNodeProps(
                  ZkStateReader.BASE_URL_PROP,
                  httpClient.getBaseURL(),
                  ZkStateReader.CORE_NAME_PROP,
                  httpClient.getDefaultCollection());
          nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
        }
        AddUpdateCommand c = new AddUpdateCommand(null);
        c.solrDoc = sdoc("id", uniqueId.incrementAndGet());
        if (nodes.size() > 0) {
          params = new ModifiableSolrParams();
          cmdDistrib.distribAdd(c, nodes, params);
        }
      }

      nodes.clear();

      for (SolrClient c : clients) {
        HttpSolrClient httpClient = (HttpSolrClient) c;
        nodeProps =
            new ZkNodeProps(
                ZkStateReader.BASE_URL_PROP,
                httpClient.getBaseURL(),
                ZkStateReader.CORE_NAME_PROP,
                httpClient.getDefaultCollection());

        nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
      }

      final AtomicInteger commits = new AtomicInteger();
      for (JettySolrRunner jetty : jettys) {
        CoreContainer cores = jetty.getCoreContainer();
        try (SolrCore core = cores.getCore("collection1")) {
          core.getUpdateHandler()
              .registerCommitCallback(
                  new SolrEventListener() {
                    @Override
                    public void postSoftCommit() {}

                    @Override
                    public void postCommit() {
                      commits.incrementAndGet();
                    }

                    @Override
                    public void newSearcher(
                        SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {}
                  });
        }
      }
      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();

      assertEquals(getShardCount(), commits.get());

      for (SolrClient c : clients) {
        NamedList<Object> resp = c.request(new LukeRequest());
        assertEquals(
            "SOLR-3428: We only did adds - there should be no deletes",
            ((NamedList<Object>) resp.get("index")).get("numDocs"),
            ((NamedList<Object>) resp.get("index")).get("maxDoc"));
      }
    }

    // Phase 5: run every retry/error sub-scenario in sequence.
    testMaxRetries(NodeType.FORWARD);
    testMaxRetries(NodeType.STANDARD);
    testOneRetry(NodeType.FORWARD);
    testOneRetry(NodeType.STANDARD);
    testRetryNodeAgainstBadAddress();
    testStdNodeRetriesSocketError();
    testForwardNodeWontRetrySocketError();
    testNodeWontRetryBadRequest(NodeType.FORWARD);
    testNodeWontRetryBadRequest(NodeType.STANDARD);
    testMinRfOnRetries(NodeType.FORWARD);
    testMinRfOnRetries(NodeType.STANDARD);
    testDistribOpenSearcher();
    testReqShouldRetryNoRetries();
    testReqShouldRetryMaxRetries();
    testReqShouldRetryBadRequest();
    testReqShouldRetryNotFound();
    testReqShouldRetryDBQ();
    testDeletes(false, true);
    testDeletes(false, false);
    testDeletes(true, true);
    testDeletes(true, false);
    getRfFromResponseShouldNotCloseTheInputStream();
    testStuckUpdates();
  }
  /**
   * Verifies add+delete pairs net out to zero docs, with and without injected
   * connect failures.
   *
   * @param dbq true to delete by query, false to delete by id
   * @param withFailures true to inject a connect exception that is cleared on the
   *     first retry check
   */
  private void testDeletes(boolean dbq, boolean withFailures) throws Exception {
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    solrclient.commit(true, true);
    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
    final MockStreamingSolrClients streamingClients =
        new MockStreamingSolrClients(updateShardHandler);
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
      if (withFailures) {
        streamingClients.setExp(Exp.CONNECT_EXCEPTION);
      }
      ArrayList<Node> nodes = new ArrayList<>();
      final AtomicInteger retries = new AtomicInteger();

      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              solrclient.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              solrclient.getDefaultCollection());
      // Node that stops the injected failure and counts each retry check.
      Node retryNode =
          new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
            @Override
            public boolean checkRetry(SolrError err) {
              streamingClients.setExp(null);
              retries.incrementAndGet();
              return super.checkRetry(err);
            }
          };
      nodes.add(retryNode);

      // Five add/delete pairs; each delete removes the doc just added.
      for (int i = 0; i < 5; i++) {
        AddUpdateCommand cmd = new AddUpdateCommand(null);
        int currentId = uniqueId.incrementAndGet();
        cmd.solrDoc = sdoc("id", currentId);
        ModifiableSolrParams params = new ModifiableSolrParams();
        cmdDistrib.distribAdd(cmd, nodes, params);
        DeleteUpdateCommand dcmd = new DeleteUpdateCommand(null);
        if (dbq) {
          dcmd.setQuery("id:" + currentId);
        } else {
          dcmd.setId(String.valueOf(currentId));
        }
        cmdDistrib.distribDelete(dcmd, nodes, params, false, null, null);
      }

      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
      cmdDistrib.distribCommit(ccmd, nodes, new ModifiableSolrParams());
      cmdDistrib.finish();

      int expectedRetryCount = 0;
      if (withFailures) {
        if (dbq) {
          expectedRetryCount = 1; // just the first cmd would be retried
        } else {
          expectedRetryCount = 10;
        }
      }
      assertEquals(expectedRetryCount, retries.get());

      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();

      // we will get java.net.ConnectException which we retry on
      assertEquals(numFoundBefore, numFoundAfter);
      assertEquals(0, cmdDistrib.getErrors().size());
    }
  }
  /**
   * Verifies the achieved replication factor is still tracked correctly when the
   * node only succeeds after several retries (failure injection clears on the
   * third retry check).
   */
  private void testMinRfOnRetries(NodeType nodeType) throws Exception {
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    final MockStreamingSolrClients streamingClients =
        new MockStreamingSolrClients(updateShardHandler);
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
      streamingClients.setExp(Exp.CONNECT_EXCEPTION);
      ArrayList<Node> nodes = new ArrayList<>();
      final AtomicInteger retries = new AtomicInteger();
      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              solrclient.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              solrclient.getDefaultCollection());
      if (nodeType == NodeType.FORWARD) {
        nodes.add(
            new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
              @Override
              public boolean checkRetry(SolrError err) {
                // Stop injecting failures after the third retry check.
                if (retries.incrementAndGet() >= 3) {
                  streamingClients.setExp(null);
                }
                return super.checkRetry(err);
              }
            });
      } else {
        nodes.add(
            new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
              @Override
              public boolean checkRetry(SolrError err) {
                if (retries.incrementAndGet() >= 3) {
                  streamingClients.setExp(null);
                }
                return super.checkRetry(err);
              }
            });
      }

      AddUpdateCommand cmd = new AddUpdateCommand(null);
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      ModifiableSolrParams params = new ModifiableSolrParams();
      RollupRequestReplicationTracker rollupReqTracker = new RollupRequestReplicationTracker();
      LeaderRequestReplicationTracker leaderReqTracker =
          new LeaderRequestReplicationTracker("shard1");

      cmdDistrib.distribAdd(cmd, nodes, params, false, rollupReqTracker, leaderReqTracker);
      cmdDistrib.finish();
      assertEquals(3, retries.get());
      // "2" here is because one would be the leader, that creates the instance of
      // LeaderRequestReplicationTracker, the second one is the node
      assertEquals(2, leaderReqTracker.getAchievedRf());

      assertEquals(0, cmdDistrib.getErrors().size());
    }
  }
  /**
   * Verifies that with a permanent connect failure a node configured for 6
   * retries is checked 7 times (initial attempt + retries) and the request ends
   * up recorded as one error.
   */
  private void testMaxRetries(NodeType nodeType) throws IOException {
    final MockStreamingSolrClients streamingClients =
        new MockStreamingSolrClients(updateShardHandler);
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
      // Failure is never cleared, so every attempt fails.
      streamingClients.setExp(Exp.CONNECT_EXCEPTION);
      ArrayList<Node> nodes = new ArrayList<>();
      final HttpSolrClient solrClient1 = (HttpSolrClient) clients.get(0);
      final AtomicInteger retries = new AtomicInteger();

      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              solrClient1.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              solrClient1.getDefaultCollection());
      Node retryNode;
      if (nodeType == NodeType.FORWARD) {
        retryNode =
            new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 6) {
              @Override
              public boolean checkRetry(SolrError err) {
                retries.incrementAndGet();
                return super.checkRetry(err);
              }
            };
      } else {
        retryNode =
            new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 6) {
              @Override
              public boolean checkRetry(SolrError err) {
                retries.incrementAndGet();
                return super.checkRetry(err);
              }
            };
      }
      nodes.add(retryNode);

      AddUpdateCommand cmd = new AddUpdateCommand(null);
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      ModifiableSolrParams params = new ModifiableSolrParams();

      cmdDistrib.distribAdd(cmd, nodes, params);
      cmdDistrib.finish();

      // 6 configured retries plus the final (rejected) check.
      assertEquals(7, retries.get());
      assertEquals(1, cmdDistrib.getErrors().size());
    }
  }
private void testReqShouldRetryNoRetries() {
SolrError err = getError(new SocketException());
SolrCmdDistributor.Req req =
new SolrCmdDistributor.Req(
null, new StdNode(null, "collection1", "shard1", 0), new UpdateRequest(), true);
assertFalse(req.shouldRetry(err));
}
private void testReqShouldRetryDBQ() {
SolrError err = getError(new SocketException());
UpdateRequest dbqReq = new UpdateRequest();
dbqReq.deleteByQuery("*:*");
SolrCmdDistributor.Req req =
new SolrCmdDistributor.Req(
null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true);
assertFalse(req.shouldRetry(err));
}
  /**
   * Regression check: trackRequestResult must read the replication factor from
   * the response stream without closing the caller-owned stream.
   */
  public void getRfFromResponseShouldNotCloseTheInputStream() {
    UpdateRequest dbqReq = new UpdateRequest();
    dbqReq.deleteByQuery("*:*");
    SolrCmdDistributor.Req req =
        new SolrCmdDistributor.Req(
            null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true);
    AtomicBoolean isClosed = new AtomicBoolean(false);
    // Stream that records whether close() was ever invoked.
    ByteArrayInputStream is =
        new ByteArrayInputStream(new byte[100]) {
          @Override
          public void close() throws IOException {
            isClosed.set(true);
            super.close();
          }
        };
    req.trackRequestResult(null, is, true);
    assertFalse("Underlying stream should not be closed!", isClosed.get());
  }
private void testReqShouldRetryMaxRetries() {
SolrError err = getError(new SocketException());
SolrCmdDistributor.Req req =
new SolrCmdDistributor.Req(
null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
assertTrue(req.shouldRetry(err));
req.retries++;
assertFalse(req.shouldRetry(err));
}
private void testReqShouldRetryBadRequest() {
SolrError err = getError(new SolrException(SolrException.ErrorCode.BAD_REQUEST, "bad request"));
SolrCmdDistributor.Req req =
new SolrCmdDistributor.Req(
null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
assertFalse(req.shouldRetry(err));
}
private void testReqShouldRetryNotFound() {
SolrError err = getError(new SolrException(SolrException.ErrorCode.NOT_FOUND, "not found"));
SolrCmdDistributor.Req req =
new SolrCmdDistributor.Req(
null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
assertTrue(req.shouldRetry(err));
}
private SolrError getError(Exception e) {
SolrError err = new SolrError();
err.e = e;
if (e instanceof SolrException) {
err.statusCode = ((SolrException) e).code();
}
return err;
}
  /**
   * Verifies a single injected connect failure (cleared on the first retry
   * check) results in exactly one retry and the add still lands in the index.
   */
  private void testOneRetry(NodeType nodeType) throws Exception {
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
    final MockStreamingSolrClients streamingClients =
        new MockStreamingSolrClients(updateShardHandler);
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
      streamingClients.setExp(Exp.CONNECT_EXCEPTION);
      ArrayList<Node> nodes = new ArrayList<>();
      final AtomicInteger retries = new AtomicInteger();
      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              solrclient.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              solrclient.getDefaultCollection());
      Node retryNode;
      if (nodeType == NodeType.FORWARD) {
        retryNode =
            new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
              @Override
              public boolean checkRetry(SolrError err) {
                // Clear the failure so the retry succeeds, and count the check.
                streamingClients.setExp(null);
                retries.incrementAndGet();
                return super.checkRetry(err);
              }
            };
      } else {
        retryNode =
            new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
              @Override
              public boolean checkRetry(SolrError err) {
                streamingClients.setExp(null);
                retries.incrementAndGet();
                return super.checkRetry(err);
              }
            };
      }
      nodes.add(retryNode);

      AddUpdateCommand cmd = new AddUpdateCommand(null);
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      ModifiableSolrParams params = new ModifiableSolrParams();

      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
      cmdDistrib.distribAdd(cmd, nodes, params);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();

      assertEquals(1, retries.get());

      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();

      // we will get java.net.ConnectException which we retry on
      assertEquals(numFoundBefore + 1, numFoundAfter);
      assertEquals(0, cmdDistrib.getErrors().size());
    }
  }
  /**
   * Sends an add that fails with an injected 400 Bad Request; the node's checkRetry is
   * consulted once but must decide not to retry, so the document is not indexed and one
   * error is recorded.
   *
   * @param nodeType whether the node under test is a FORWARD node or a standard replica node.
   */
  private void testNodeWontRetryBadRequest(NodeType nodeType) throws Exception {
    ignoreException("Bad Request");
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
    final MockStreamingSolrClients streamingClients =
        new MockStreamingSolrClients(updateShardHandler);
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
      // Fail the add with a 400 Bad Request.
      streamingClients.setExp(Exp.BAD_REQUEST);
      ArrayList<Node> nodes = new ArrayList<>();
      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              solrclient.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              solrclient.getDefaultCollection());
      final AtomicInteger retries = new AtomicInteger();
      Node retryNode;
      if (nodeType == NodeType.FORWARD) {
        retryNode =
            new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
              @Override
              public boolean checkRetry(SolrError err) {
                // Count how many times a retry decision is requested.
                retries.incrementAndGet();
                return super.checkRetry(err);
              }
            };
      } else {
        retryNode =
            new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
              @Override
              public boolean checkRetry(SolrError err) {
                retries.incrementAndGet();
                return super.checkRetry(err);
              }
            };
      }
      nodes.add(retryNode);
      AddUpdateCommand cmd = new AddUpdateCommand(null);
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      ModifiableSolrParams params = new ModifiableSolrParams();
      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
      cmdDistrib.distribAdd(cmd, nodes, params);
      // Stop injecting failures so the commit itself goes through.
      streamingClients.setExp(null);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();
      // it will checkRetry, but not actually do it...
      assertEquals(1, retries.get());
      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
      // the add failed with a 400 Bad Request, which is never retried, so no doc was added
      assertEquals(numFoundBefore, numFoundAfter);
      assertEquals(1, cmdDistrib.getErrors().size());
      unIgnoreException("Bad Request");
    }
  }
  /**
   * Sends an add through a FORWARD node that fails with an injected socket exception;
   * for forward nodes a socket error is not retried, so the add is dropped and one
   * error is recorded.
   */
  private void testForwardNodeWontRetrySocketError() throws Exception {
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
    final MockStreamingSolrClients streamingClients =
        new MockStreamingSolrClients(updateShardHandler);
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
      // Fail the add with a socket exception.
      streamingClients.setExp(Exp.SOCKET_EXCEPTION);
      ArrayList<Node> nodes = new ArrayList<>();
      final AtomicInteger retries = new AtomicInteger();
      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              solrclient.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              solrclient.getDefaultCollection());
      ForwardNode retryNode =
          new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
            @Override
            public boolean checkRetry(SolrError err) {
              // Count how many times a retry decision is requested.
              retries.incrementAndGet();
              return super.checkRetry(err);
            }
          };
      nodes.add(retryNode);
      AddUpdateCommand cmd = new AddUpdateCommand(null);
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      ModifiableSolrParams params = new ModifiableSolrParams();
      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
      cmdDistrib.distribAdd(cmd, nodes, params);
      // Stop injecting failures so the commit itself goes through.
      streamingClients.setExp(null);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();
      // it will checkRetry, but not actually do it...
      assertEquals(1, retries.get());
      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
      // we will get java.net.SocketException: Network is unreachable, which we don't retry on
      assertEquals(numFoundBefore, numFoundAfter);
      assertEquals(1, cmdDistrib.getErrors().size());
    }
  }
  /**
   * Unlike forward nodes, a standard replica node DOES retry socket errors: with the
   * injected failure never cleared, the request exhausts its full retry budget.
   */
  private void testStdNodeRetriesSocketError() throws Exception {
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    final MockStreamingSolrClients streamingClients =
        new MockStreamingSolrClients(updateShardHandler);
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
      // Every request fails with a socket exception; it is never cleared.
      streamingClients.setExp(Exp.SOCKET_EXCEPTION);
      ArrayList<Node> nodes = new ArrayList<>();
      final AtomicInteger retries = new AtomicInteger();
      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP,
              solrclient.getBaseURL(),
              ZkStateReader.CORE_NAME_PROP,
              solrclient.getDefaultCollection());
      Node retryNode =
          new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
            @Override
            public boolean checkRetry(SolrError err) {
              // Count how many times a retry decision is requested.
              retries.incrementAndGet();
              return super.checkRetry(err);
            }
          };
      nodes.add(retryNode);
      AddUpdateCommand cmd = new AddUpdateCommand(null);
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      ModifiableSolrParams params = new ModifiableSolrParams();
      cmdDistrib.distribAdd(cmd, nodes, params);
      cmdDistrib.finish();
      // checkRetry fires for the initial failure plus each retry attempt: with
      // maxRetries = 5 that is 6 invocations in total before the request gives up.
      assertEquals(6, retries.get());
    }
  }
  /**
   * Sends an add through a ForwardNode pointing at a dead host; the checkRetry override
   * repoints the node at a live replica, so if a retry happens it succeeds. Whether a
   * retry happens at all depends on which exception the OS raises for the bad address,
   * so both outcomes are accepted.
   */
  private void testRetryNodeAgainstBadAddress() throws SolrServerException, IOException {
    // Test RetryNode
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
      final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
      long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
      ArrayList<Node> nodes = new ArrayList<>();
      ZkNodeProps nodeProps =
          new ZkNodeProps(
              ZkStateReader.BASE_URL_PROP, DEAD_HOST_1 + "/solr", ZkStateReader.CORE_NAME_PROP, "");
      ForwardNode retryNode =
          new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
            @Override
            public boolean checkRetry(SolrError err) {
              // Redirect the node to a live server before the retry decision is made.
              ZkNodeProps leaderProps =
                  new ZkNodeProps(
                      ZkStateReader.BASE_URL_PROP,
                      solrclient.getBaseURL(),
                      ZkStateReader.CORE_NAME_PROP,
                      solrclient.getDefaultCollection());
              this.nodeProps = new ZkCoreNodeProps(leaderProps);
              return super.checkRetry(err);
            }
          };
      nodes.add(retryNode);
      AddUpdateCommand cmd = new AddUpdateCommand(null);
      cmd.solrDoc = sdoc("id", uniqueId.incrementAndGet());
      ModifiableSolrParams params = new ModifiableSolrParams();
      cmdDistrib.distribAdd(cmd, nodes, params);
      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
      params = new ModifiableSolrParams();
      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
      cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();
      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
      // different OS's will throw different exceptions for the bad address above
      if (numFoundBefore != numFoundAfter) {
        // The retry happened and succeeded against the redirected node.
        assertEquals(0, cmdDistrib.getErrors().size());
        assertEquals(numFoundBefore + 1, numFoundAfter);
      } else {
        // we will get java.net.SocketException: Network is unreachable and not retry
        assertEquals(numFoundBefore, numFoundAfter);
        assertEquals(1, cmdDistrib.getErrors().size());
      }
    }
  }
  /** Closes the update shard handler before delegating to the base-class teardown. */
  @Override
  public void distribTearDown() throws Exception {
    updateShardHandler.close();
    super.distribTearDown();
  }
private void testDistribOpenSearcher() {
try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
UpdateRequest updateRequest = new UpdateRequest();
CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
// test default value (should be true)
cmdDistrib.addCommit(updateRequest, ccmd);
boolean openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER, false);
assertTrue(openSearcher);
// test openSearcher = false
ccmd.openSearcher = false;
cmdDistrib.addCommit(updateRequest, ccmd);
openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER, true);
assertFalse(openSearcher);
}
}
  /**
   * Blocks direct updates via {@code TestInjection.directUpdateLatch} and pushes adds to
   * random subsets of nodes; if processing stalls, the distributor is expected to fail
   * with an IOException whose message contains "processing has stalled".
   */
  private void testStuckUpdates() {
    TestInjection.directUpdateLatch = new CountDownLatch(1);
    List<Node> nodes = new ArrayList<>();
    ModifiableSolrParams params;
    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
      for (int i = 0; i < 3; i++) {
        nodes.clear();
        for (SolrClient c : clients) {
          // Pick a random subset of clients as targets for this round.
          if (random().nextBoolean()) {
            continue;
          }
          HttpSolrClient httpClient = (HttpSolrClient) c;
          ZkNodeProps nodeProps =
              new ZkNodeProps(
                  ZkStateReader.BASE_URL_PROP,
                  httpClient.getBaseURL(),
                  ZkStateReader.CORE_NAME_PROP,
                  "");
          StdNode node = new StdNode(new ZkCoreNodeProps(nodeProps));
          nodes.add(node);
        }
        AddUpdateCommand c = new AddUpdateCommand(null);
        c.solrDoc = sdoc("id", uniqueId.incrementAndGet());
        if (nodes.size() > 0) {
          params = new ModifiableSolrParams();
          cmdDistrib.distribAdd(c, nodes, params, false);
        }
      }
      cmdDistrib.blockAndDoRetries();
    } catch (IOException e) {
      assertTrue(e.toString(), e.toString().contains("processing has stalled"));
    } finally {
      // Always release the latch so blocked update threads can finish.
      TestInjection.directUpdateLatch.countDown();
    }
  }
}
|
apache/ignite | 36,124 | modules/core/src/test/java/org/apache/ignite/internal/encryption/CacheGroupKeyChangeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.encryption;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.WALMode;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.TestRecordingCommunicationSpi;
import org.apache.ignite.internal.managers.discovery.DiscoveryCustomMessage;
import org.apache.ignite.internal.managers.encryption.GridEncryptionManager;
import org.apache.ignite.internal.pagemem.wal.IgniteWriteAheadLogManager;
import org.apache.ignite.internal.processors.cache.persistence.filename.NodeFileTree;
import org.apache.ignite.internal.processors.cache.persistence.wal.WALPointer;
import org.apache.ignite.internal.util.distributed.DistributedProcess.DistributedProcessType;
import org.apache.ignite.internal.util.distributed.InitMessage;
import org.apache.ignite.internal.util.distributed.SingleNodeMessage;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.discovery.tcp.TestTcpDiscoverySpi;
import org.apache.ignite.testframework.GridTestUtils.DiscoveryHook;
import org.junit.Test;
import static org.apache.ignite.configuration.WALMode.LOG_ONLY;
import static org.apache.ignite.configuration.WALMode.NONE;
import static org.apache.ignite.internal.managers.encryption.GridEncryptionManager.INITIAL_KEY_ID;
import static org.apache.ignite.spi.encryption.keystore.KeystoreEncryptionSpi.DEFAULT_MASTER_KEY_NAME;
import static org.apache.ignite.testframework.GridTestUtils.assertThrowsAnyCause;
import static org.apache.ignite.testframework.GridTestUtils.assertThrowsWithCause;
import static org.apache.ignite.testframework.GridTestUtils.runAsync;
import static org.apache.ignite.testframework.GridTestUtils.waitForCondition;
/**
* Cache group key change distributed process tests.
*/
public class CacheGroupKeyChangeTest extends AbstractEncryptionTest {
/** Timeout. */
private static final long MAX_AWAIT_MILLIS = 15_000;
/** 1 megabyte in bytes. */
private static final int MB = 1024 * 1024;
/** */
private static final String GRID_2 = "grid-2";
/** Discovery hook for distributed process. */
private InitMessageDiscoveryHook discoveryHook;
/** Count of cache backups. */
private int backups;
/** Number of WAL segments. */
private int walSegments = 10;
/** WAL mode. */
private WALMode walMode = LOG_ONLY;
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String name) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(name);

        cfg.setConsistentId(name);

        // Recording SPI lets individual tests block/inspect distributed-process messages.
        cfg.setCommunicationSpi(new TestRecordingCommunicationSpi());

        // Optional hook used by tests that must intercept discovery message delivery.
        if (discoveryHook != null)
            ((TestTcpDiscoverySpi)cfg.getDiscoverySpi()).discoveryHook(discoveryHook);

        // Small persistent region with configurable WAL so tests can force segment rollover.
        DataStorageConfiguration memCfg = new DataStorageConfiguration()
            .setDefaultDataRegionConfiguration(
                new DataRegionConfiguration()
                    .setMaxSize(100 * MB)
                    .setPersistenceEnabled(true))
            .setPageSize(4 * 1024)
            .setWalSegmentSize(MB)
            .setWalSegments(walSegments)
            .setMaxWalArchiveSize(2 * walSegments * MB)
            .setCheckpointFrequency(30 * 1000L)
            .setWalMode(walMode);

        cfg.setDataStorageConfiguration(memCfg);

        return cfg;
    }
/** {@inheritDoc} */
@Override protected <K, V> CacheConfiguration<K, V> cacheConfiguration(String name, String grp) {
CacheConfiguration<K, V> cfg = super.cacheConfiguration(name, grp);
return cfg.setAffinity(new RendezvousAffinityFunction(false, 8)).setBackups(backups);
}
    /** {@inheritDoc} Each test starts from an empty topology and clean persistence. */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        stopAllGrids();

        cleanPersistenceDir();
    }
    /** {@inheritDoc} Stops all grids and wipes persistence before the base teardown. */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        cleanPersistenceDir();

        super.afterTest();
    }
    /**
     * Checks that a new node cannot join the cluster while a cache group key change is in
     * progress, and that the rotation still completes after the rejected join attempt.
     *
     * @throws Exception If failed.
     */
    @Test
    @SuppressWarnings("ThrowableNotThrown")
    public void testRejectNodeJoinDuringRotation() throws Exception {
        T2<IgniteEx, IgniteEx> grids = startTestGrids(true);

        createEncryptedCache(grids.get1(), grids.get2(), cacheName(), null);

        int grpId = CU.cacheId(cacheName());

        assertEquals(0, grids.get1().context().encryption().getActiveKey(grpId).id());

        // Hold the distributed process open by blocking grid2's single-node responses.
        TestRecordingCommunicationSpi commSpi = TestRecordingCommunicationSpi.spi(grids.get2());

        commSpi.blockMessages((node, msg) -> msg instanceof SingleNodeMessage);

        IgniteFuture<Void> fut = grids.get1().encryption().changeCacheGroupKey(Collections.singleton(cacheName()));

        commSpi.waitForBlocked();

        // Join must be rejected while the key change is active.
        assertThrowsWithCause(() -> startGrid(3), IgniteCheckedException.class);

        commSpi.stopBlock();

        fut.get();

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        checkEncryptedCaches(grids.get1(), grids.get2());
    }
    /**
     * Checks that a key change performed while a baseline node is down is picked up by
     * that node when it rejoins.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNotAllBltNodesPresent() throws Exception {
        startTestGrids(true);

        createEncryptedCache(grid(GRID_0), grid(GRID_1), cacheName(), null);

        stopGrid(GRID_1);

        // Rotate the key with one baseline node offline.
        grid(GRID_0).encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get();

        startGrid(GRID_1);

        checkGroupKey(CU.cacheId(cacheName()), INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
    }
    /** Non-coordinator node fails before the prepare phase (discovery blocked). @throws Exception If failed. */
    @Test
    public void testNodeFailsBeforePrepare() throws Exception {
        checkNodeFailsDuringRotation(false, true, true);
    }
    /** Non-coordinator node fails before the perform phase (discovery blocked). @throws Exception If failed. */
    @Test
    public void testNodeFailsBeforePerform() throws Exception {
        checkNodeFailsDuringRotation(false, false, true);
    }
    /** Non-coordinator node fails after the prepare phase (communication blocked). @throws Exception If failed. */
    @Test
    public void testNodeFailsAfterPrepare() throws Exception {
        checkNodeFailsDuringRotation(false, true, false);
    }
    /** Coordinator node fails after the prepare phase (communication blocked). @throws Exception If failed. */
    @Test
    public void testCrdFailsAfterPrepare() throws Exception {
        checkNodeFailsDuringRotation(true, true, false);
    }
    /** Non-coordinator node fails after the perform phase (communication blocked). @throws Exception If failed. */
    @Test
    public void testNodeFailsAfterPerform() throws Exception {
        checkNodeFailsDuringRotation(false, false, false);
    }
    /** Coordinator node fails after the perform phase (communication blocked). @throws Exception If failed. */
    @Test
    public void testCrdFailsAfterPerform() throws Exception {
        checkNodeFailsDuringRotation(true, false, false);
    }
    /**
     * Starts two nodes, launches a cache group key change and kills one node at the given
     * phase of the distributed process, then verifies that the rotation completes, the
     * restarted node catches up, and a subsequent rotation still works.
     *
     * @param stopCrd {@code True} to stop coordinator.
     * @param prepare {@code True} to stop on the prepare phase. {@code False} to stop on the perform phase.
     * @param discoBlock {@code True} to block discovery, {@code False} to block communication SPI.
     * @throws Exception If failed.
     */
    private void checkNodeFailsDuringRotation(boolean stopCrd, boolean prepare, boolean discoBlock) throws Exception {
        cleanPersistenceDir();

        DistributedProcessType type = prepare ?
            DistributedProcessType.CACHE_GROUP_KEY_CHANGE_PREPARE : DistributedProcessType.CACHE_GROUP_KEY_CHANGE_FINISH;

        InitMessageDiscoveryHook locHook = new InitMessageDiscoveryHook(type);

        // Install the discovery hook on whichever node is going to be stopped
        // (the hook is picked up by getConfiguration at grid start).
        if (discoBlock && stopCrd)
            discoveryHook = locHook;

        IgniteEx grid0 = startGrid(GRID_0);

        if (discoBlock && !stopCrd)
            discoveryHook = locHook;

        IgniteEx grid1 = startGrid(GRID_1);

        grid0.cluster().state(ClusterState.ACTIVE);

        createEncryptedCache(grid0, grid1, cacheName(), null);

        int grpId = CU.cacheId(cacheName());

        checkGroupKey(grpId, INITIAL_KEY_ID, MAX_AWAIT_MILLIS);

        TestRecordingCommunicationSpi spi = TestRecordingCommunicationSpi.spi(grid1);

        if (!discoBlock) {
            AtomicBoolean preparePhase = new AtomicBoolean(true);

            // Block grid1's single-node response on either the first (prepare) or the
            // second (perform) round of the distributed process.
            spi.blockMessages((node, msg) -> {
                if (msg instanceof SingleNodeMessage) {
                    boolean isPrepare = preparePhase.compareAndSet(true, false);

                    return prepare || !isPrepare;
                }

                return false;
            });
        }

        String alive = stopCrd ? GRID_1 : GRID_0;
        String stopped = stopCrd ? GRID_0 : GRID_1;

        IgniteFuture<Void> changeFut = grid(alive).encryption().changeCacheGroupKey(Collections.singleton(cacheName()));

        IgniteInternalFuture<?> stopFut = new GridFinishedFuture<>();

        if (discoBlock) {
            locHook.waitForBlocked(MAX_AWAIT_MILLIS);

            stopGrid(stopped, true);

            locHook.stopBlock();
        }
        else {
            spi.waitForBlocked();

            stopFut = runAsync(() -> stopGrid(stopped, true));
        }

        changeFut.get(MAX_AWAIT_MILLIS);
        stopFut.get(MAX_AWAIT_MILLIS);

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        IgniteEx stoppedNode = startGrid(stopped);

        stoppedNode.resetLostPartitions(Collections.singleton(ENCRYPTED_CACHE));

        awaitPartitionMapExchange();

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        // The restarted node must be able to drive the next rotation.
        stoppedNode.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

        checkGroupKey(grpId, INITIAL_KEY_ID + 2, MAX_AWAIT_MILLIS);
    }
    /**
     * Ensures that we can rotate the key more than 255 times.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testKeyIdentifierOverflow() throws Exception {
        IgniteEx node = startTestGrids(true).get1();

        createEncryptedCache(node, null, cacheName(), null, false);

        int grpId = CU.cacheId(cacheName());

        byte keyId = INITIAL_KEY_ID;

        // Rotate until the byte-sized identifier wraps back to the initial value, i.e.
        // 256 rotations; '& 0xff' converts the signed byte to the unsigned key id.
        do {
            node.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get();

            // Validates reencryption of index partition.
            checkGroupKey(grpId, ++keyId & 0xff, MAX_AWAIT_MILLIS);
        } while (keyId != INITIAL_KEY_ID);
    }
    /**
     * Runs master key change and cache group key change concurrently, in random order,
     * and verifies that the group key change either succeeds or is rejected with a clear
     * "master key has been changed" error, in which case the iteration is retried.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testMasterAndCacheGroupKeySimultaneousChange() throws Exception {
        startTestGrids(true);

        IgniteEx node0 = grid(GRID_0);
        IgniteEx node1 = grid(GRID_1);

        createEncryptedCache(node0, node1, cacheName(), null);

        int grpId = CU.cacheId(cacheName());

        assertTrue(checkMasterKeyName(DEFAULT_MASTER_KEY_NAME));

        Random rnd = ThreadLocalRandom.current();

        for (byte keyId = 1; keyId < 50; keyId++) {
            String currMkName = node0.context().config().getEncryptionSpi().getMasterKeyName();

            // Alternate between the two non-default master keys.
            String newMkName = currMkName.equals(MASTER_KEY_NAME_2) ? MASTER_KEY_NAME_3 : MASTER_KEY_NAME_2;

            boolean changeGrpFirst = rnd.nextBoolean();

            IgniteFuture<Void> grpKeyFut;
            IgniteFuture<Void> masterKeyFut;

            // Randomize which of the two changes is initiated first.
            if (changeGrpFirst) {
                grpKeyFut = node0.encryption().changeCacheGroupKey(Collections.singleton(cacheName()));
                masterKeyFut = node0.encryption().changeMasterKey(newMkName);
            }
            else {
                masterKeyFut = node0.encryption().changeMasterKey(newMkName);
                grpKeyFut = node0.encryption().changeCacheGroupKey(Collections.singleton(cacheName()));
            }

            masterKeyFut.get(MAX_AWAIT_MILLIS);

            assertTrue(checkMasterKeyName(newMkName));

            try {
                grpKeyFut.get(MAX_AWAIT_MILLIS);

                checkGroupKey(grpId, keyId, MAX_AWAIT_MILLIS);
            }
            catch (IgniteException e) {
                // A rejection caused by the concurrent master key change is expected.
                assertTrue(e.getMessage().contains("Cache group key change was rejected. Master key has been changed."));

                // Retry iteration.
                keyId -= 1;
            }
        }
    }
    /**
     * Checks that a cache created while another group's key rotation is in flight starts
     * with the initial key id and does not interfere with that rotation.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCacheStartDuringRotation() throws Exception {
        T2<IgniteEx, IgniteEx> grids = startTestGrids(true);

        createEncryptedCache(grids.get1(), grids.get2(), cacheName(), null);

        // Pause the distributed key-change process.
        TestRecordingCommunicationSpi commSpi = TestRecordingCommunicationSpi.spi(grids.get2());

        commSpi.blockMessages((node, msg) -> msg instanceof SingleNodeMessage);

        IgniteFuture<Void> fut = grids.get1().encryption().changeCacheGroupKey(Collections.singleton(cacheName()));

        commSpi.waitForBlocked();

        // Start a new cache (separate group) while the rotation is paused.
        IgniteCache<Integer, Integer> cache = grids.get1().createCache(cacheConfiguration("cache1", null));

        for (int i = 0; i < 100; i++)
            cache.put(i, i);

        commSpi.stopBlock();

        fut.get();

        checkGroupKey(CU.cacheId(cacheName()), INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
        checkGroupKey(CU.cacheId("cache1"), INITIAL_KEY_ID, MAX_AWAIT_MILLIS);
    }
    /**
     * Checks that a cache added to the SAME group whose key is being rotated picks up the
     * new key once the rotation completes.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCacheStartSameGroupDuringRotation() throws Exception {
        T2<IgniteEx, IgniteEx> grids = startTestGrids(true);

        String grpName = "shared";

        createEncryptedCache(grids.get1(), grids.get2(), cacheName(), grpName);

        // Pause the distributed key-change process.
        TestRecordingCommunicationSpi commSpi = TestRecordingCommunicationSpi.spi(grids.get2());

        commSpi.blockMessages((node, msg) -> msg instanceof SingleNodeMessage);

        IgniteFuture<Void> fut = grids.get1().encryption().changeCacheGroupKey(Collections.singleton(grpName));

        commSpi.waitForBlocked();

        // Start another cache in the group being rotated.
        IgniteCache<Integer, Integer> cache =
            grids.get1().createCache(cacheConfiguration("cache1", grpName));

        commSpi.stopBlock();

        for (int i = 0; i < 100; i++)
            cache.put(i, i);

        fut.get();

        checkGroupKey(CU.cacheId(grpName), INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
    }
    /**
     * Changes the group key right after a new node joins (while rebalancing may still be
     * in progress) and checks that the whole cluster ends up on the new key.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testChangeKeyDuringRebalancing() throws Exception {
        T2<IgniteEx, IgniteEx> grids = startTestGrids(true);

        IgniteEx node0 = grids.get1();
        IgniteEx node1 = grids.get2();

        createEncryptedCache(node0, node1, cacheName(), null);

        loadData(500_000);

        // Joining node triggers rebalancing after the baseline change.
        IgniteEx node2 = startGrid(GRID_2);

        resetBaselineTopology();

        int grpId = CU.cacheId(cacheName());

        node2.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

        awaitPartitionMapExchange();

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
    }
    /**
     * Restarts nodes so that a node that only knows the old key becomes coordinator, waits
     * for the cluster to settle back on the old key, then verifies that a new rotation is
     * rejected until the potentially old-key-encrypted WAL segments are gone, succeeds
     * afterwards, and that the stale key is eventually purged on both nodes.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNodeWithOlderKeyBecameCoordinator() throws Exception {
        backups = 1;

        startTestGrids(true);

        IgniteEx node0 = grid(GRID_0);
        IgniteEx node1 = grid(GRID_1);

        createEncryptedCache(node0, node1, cacheName(), null);

        int grpId = CU.cacheId(cacheName());

        stopGrid(GRID_0);

        // Changing encryption key on one node.
        node1.context().encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        stopGrid(GRID_1);

        // The node with only the old key ID has become the coordinator.
        node0 = startGrid(GRID_0);

        assertTrue(Collections.singleton(INITIAL_KEY_ID).containsAll(node0.context().encryption().groupKeyIds(grpId)));

        node1 = startGrid(GRID_1);

        node1.cluster().state(ClusterState.ACTIVE);

        // Wait until cache will be reencrypted with the old key.
        checkGroupKey(grpId, INITIAL_KEY_ID, MAX_AWAIT_MILLIS);

        GridEncryptionManager encrMgr0 = node0.context().encryption();
        GridEncryptionManager encrMgr1 = node1.context().encryption();

        // Changing the encryption key is not possible until the WAL segment,
        // encrypted (probably) with the previous key, is deleted.
        assertThrowsAnyCause(log,
            () -> encrMgr1.changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS),
            IgniteException.class,
            "Cache group key change was rejected. Cannot add new key identifier, it's already present.");

        long walIdx = node1.context().cache().context().wal().currentSegment();

        // Simulate WAL segment deletion.
        // NOTE(review): the loop body passes walIdx, not the loop variable n, on every
        // iteration — verify this is intentional and not a typo for onWalSegmentRemoved(n).
        for (long n = 0; n <= walIdx; n++)
            node1.context().encryption().onWalSegmentRemoved(walIdx);

        encrMgr1.changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        checkEncryptedCaches(node0, node1);

        walIdx = Math.max(node0.context().cache().context().wal().currentSegment(),
            node1.context().cache().context().wal().currentSegment());

        // Simulate WAL segment deletion.
        for (long n = 0; n <= walIdx; n++) {
            encrMgr0.onWalSegmentRemoved(walIdx);
            encrMgr1.onWalSegmentRemoved(walIdx);
        }

        // Make sure the previous key has been removed.
        checkKeysCount(node0, grpId, 1, MAX_AWAIT_MILLIS);

        assertEquals(encrMgr1.groupKeyIds(grpId), encrMgr0.groupKeyIds(grpId));
    }
    /**
     * Ensures that a node cannot join the cluster if it cannot replace an existing encryption key.
     * <p>
     * If the joining node has a different encryption key than the coordinator, but with the same identifier, it should
     * not perform key rotation to a new key (received from coordinator) until the previous key is deleted.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNodeJoinRejectedIfKeyCannotBeReplaced() throws Exception {
        backups = 2;

        T2<IgniteEx, IgniteEx> nodes = startTestGrids(true);

        startGrid(GRID_2);

        resetBaselineTopology();

        createEncryptedCache(nodes.get1(), nodes.get2(), cacheName(), null);

        forceCheckpoint();

        stopGrid(GRID_0);
        stopGrid(GRID_1);

        // Advance grid2 two rotations ahead of the stopped nodes.
        grid(GRID_2).encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

        int grpId = CU.cacheId(cacheName());

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        grid(GRID_2).encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

        checkGroupKey(grpId, INITIAL_KEY_ID + 2, MAX_AWAIT_MILLIS);

        stopGrid(GRID_2);

        // Restart the first two nodes, which are still on the initial key.
        startTestGrids(false);

        checkGroupKey(grpId, INITIAL_KEY_ID, MAX_AWAIT_MILLIS);

        // Rotate once so grid0/grid1 now hold a DIFFERENT key under the same id as grid2's.
        grid(GRID_0).encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

        assertThrowsAnyCause(log,
            () -> startGrid(GRID_2),
            IgniteSpiException.class,
            "Cache key differs! Node join is rejected.");
    }
    /**
     * Rotates the keys of two cache groups whose node filters exclude a different node
     * each, verifying both nodes still track both key generations, and that the old keys
     * are purged on all nodes after restart once enough WAL has been written.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testKeyChangeWithNodeFilter() throws Exception {
        startTestGrids(true);

        IgniteEx node0 = grid(GRID_0);
        IgniteEx node1 = grid(GRID_1);

        Object nodeId0 = node0.localNode().consistentId();
        Object nodeId1 = node1.localNode().consistentId();

        String cache1 = cacheName();
        String cache2 = "cache2";

        // cache1 excludes node0; cache2 excludes node1.
        node0.createCache(cacheConfiguration(cache1, null)
            .setNodeFilter(node -> !node.consistentId().equals(nodeId0)));

        node0.createCache(cacheConfiguration(cache2, null)
            .setNodeFilter(node -> !node.consistentId().equals(nodeId1)));

        loadData(10_000);

        forceCheckpoint();

        int grpId1 = CU.cacheId(cache1);
        int grpId2 = CU.cacheId(cache2);

        node0.encryption().changeCacheGroupKey(Arrays.asList(cache1, cache2)).get();

        // Both nodes must know both key generations for each group, filter or not.
        List<Integer> keys0 = node0.context().encryption().groupKeyIds(grpId1);
        List<Integer> keys1 = node1.context().encryption().groupKeyIds(grpId1);

        assertEquals(2, keys0.size());
        assertEquals(2, keys1.size());

        assertTrue(keys0.containsAll(keys1));

        keys0 = node0.context().encryption().groupKeyIds(grpId2);
        keys1 = node1.context().encryption().groupKeyIds(grpId2);

        assertEquals(2, keys0.size());
        assertEquals(2, keys1.size());

        assertTrue(keys0.containsAll(keys1));

        checkGroupKey(grpId1, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
        checkGroupKey(grpId2, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        stopAllGrids();

        startTestGrids(false);

        node0 = grid(GRID_0);
        node1 = grid(GRID_1);

        IgniteCache<Object, Object> allNodesCache = node0.createCache("cacheX");

        // Previous keys must be deleted when the corresponding WAL segment is deleted, so we add data on all nodes.
        long endTime = U.currentTimeMillis() + 30_000;

        int cntr = 0;

        do {
            allNodesCache.put(cntr, String.valueOf(cntr));

            if (node0.context().encryption().groupKeyIds(grpId1).size() == 1 &&
                node1.context().encryption().groupKeyIds(grpId1).size() == 1 &&
                node0.context().encryption().groupKeyIds(grpId2).size() == 1 &&
                node1.context().encryption().groupKeyIds(grpId2).size() == 1)
                break;

            ++cntr;
        } while (U.currentTimeMillis() < endTime);

        assertEquals(1, node0.context().encryption().groupKeyIds(grpId1).size());
        assertEquals(1, node0.context().encryption().groupKeyIds(grpId2).size());

        assertEquals(node0.context().encryption().groupKeyIds(grpId1), node1.context().encryption().groupKeyIds(grpId1));
        assertEquals(node0.context().encryption().groupKeyIds(grpId2), node1.context().encryption().groupKeyIds(grpId2));

        checkGroupKey(grpId1, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
        checkGroupKey(grpId2, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        checkEncryptedCaches(node0, node1);
    }
    /**
     * Rotates the group key while a data streamer keeps writing, then verifies that
     * reencryption finishes and each node ends up holding exactly one key.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testBasicChangeWithConstantLoad() throws Exception {
        // Extra WAL segments so rollover under load can retire the old key.
        walSegments = 20;

        startTestGrids(true);

        IgniteEx node0 = grid(GRID_0);
        IgniteEx node1 = grid(GRID_1);

        GridEncryptionManager encrMgr0 = node0.context().encryption();
        GridEncryptionManager encrMgr1 = node1.context().encryption();

        createEncryptedCache(node0, node1, cacheName(), null);

        forceCheckpoint();

        int grpId = CU.cacheId(cacheName());

        IgniteInternalFuture<?> loadFut = loadDataAsync(node0);

        try {
            IgniteCache<Object, Object> cache = node0.cache(cacheName());

            // Make sure a non-trivial amount of data exists before rotating.
            boolean success = waitForCondition(() -> cache.size() > 2000, MAX_AWAIT_MILLIS);

            assertTrue(success);

            node0.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS);

            awaitEncryption(G.allGrids(), grpId, MAX_AWAIT_MILLIS);

            // Wait for the old key to be purged on both nodes.
            waitForCondition(() ->
                encrMgr0.groupKeyIds(grpId).size() == 1 && encrMgr1.groupKeyIds(grpId).size() == 1, MAX_AWAIT_MILLIS);
        }
        finally {
            loadFut.cancel();
        }

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

        assertEquals(node0.cluster().localNode().id().toString(), 1, encrMgr0.groupKeyIds(grpId).size());
        assertEquals(node1.cluster().localNode().id().toString(), 1, encrMgr1.groupKeyIds(grpId).size());
    }
    /**
     * Ensures that unused key will be removed even if user cleaned wal archive folder manually.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testWalArchiveCleanup() throws Exception {
        IgniteEx node = startGrid(GRID_0);

        node.cluster().state(ClusterState.ACTIVE);

        createEncryptedCache(node, null, cacheName(), null);

        node.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get();

        IgniteWriteAheadLogManager walMgr = node.context().cache().context().wal();

        // Reserve the current segment so it cannot be cleaned automatically.
        long reservedIdx = walMgr.currentSegment();

        assertTrue(walMgr.reserve(new WALPointer(reservedIdx, 0, 0)));

        // Write until the reserved segment has been archived.
        while (walMgr.lastArchivedSegment() < reservedIdx) {
            long val = ThreadLocalRandom.current().nextLong();

            node.cache(cacheName()).put(val, String.valueOf(val));
        }

        forceCheckpoint();

        int grpId = CU.cacheId(cacheName());

        // Both the old and the new key are still tracked at this point.
        assertEquals(2, node.context().encryption().groupKeyIds(grpId).size());

        NodeFileTree ft = node.context().pdsFolderResolver().fileTree();

        stopAllGrids();

        // Clean up the WAL archive folder manually while the node is down.
        // NOTE(review): this deletes walArchive()'s PARENT directory — presumably the whole
        // WAL root including working segments; confirm that is the intended simulation.
        assertTrue(U.delete(ft.walArchive().getParentFile()));

        node = startGrid(GRID_0);

        node.cluster().state(ClusterState.ACTIVE);

        // Keep writing until the stale key is purged despite the missing archive.
        while (node.context().encryption().groupKeyIds(grpId).size() != 1) {
            long val = ThreadLocalRandom.current().nextLong();

            node.cache(cacheName()).put(val, String.valueOf(val));
        }

        checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
    }
/**
 * Starts asynchronous background load into the test cache, continuing until cancelled.
 *
 * @param grid Grid to stream data through.
 * @return Future for this operation.
 */
private IgniteInternalFuture<?> loadDataAsync(Ignite grid) {
    return runAsync(() -> {
        // Continue numbering from the current cache size to avoid overwriting existing keys.
        long key = grid.cache(cacheName()).size();

        try (IgniteDataStreamer<Long, String> streamer = grid.dataStreamer(cacheName())) {
            // Stop streaming as soon as the future is cancelled (thread gets interrupted).
            for (; !Thread.currentThread().isInterrupted(); key++)
                streamer.addData(key, String.valueOf(key));
        }
    });
}
/**
 * @throws Exception If failed.
 */
@Test
public void testCacheStartOnClientDuringRotation() throws Exception {
    T2<IgniteEx, IgniteEx> nodes = startTestGrids(true);

    IgniteEx node0 = nodes.get1();
    IgniteEx node1 = nodes.get2();

    IgniteEx client = startClientGrid(getConfiguration("client"));

    node0.cluster().state(ClusterState.ACTIVE);

    String grpName = "shared";

    createEncryptedCache(client, null, cacheName(), grpName);

    awaitPartitionMapExchange();

    // Block node1's distributed-process response so key rotation stays in progress.
    TestRecordingCommunicationSpi commSpi = TestRecordingCommunicationSpi.spi(node1);

    commSpi.blockMessages((node, message) -> message instanceof SingleNodeMessage);

    IgniteFuture<Void> changeKeyFut = node0.encryption().changeCacheGroupKey(Collections.singleton(grpName));

    commSpi.waitForBlocked();

    // Start a new cache in the same shared group from the client while rotation is in flight.
    String cacheName = "userCache";

    IgniteInternalFuture<?> cacheStartFut = runAsync(() -> {
        client.getOrCreateCache(cacheConfiguration(cacheName, grpName));
    });

    commSpi.stopBlock();

    changeKeyFut.get(MAX_AWAIT_MILLIS);
    cacheStartFut.get(MAX_AWAIT_MILLIS);

    // The newly started cache must be usable and encrypted with the new key.
    IgniteCache<Integer, String> cache = client.cache(cacheName);

    for (int i = 0; i < 200; i++)
        cache.put(i, String.valueOf(i));

    checkEncryptedCaches(node0, client);

    checkGroupKey(CU.cacheId(grpName), INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

    checkEncryptedCaches(node0, node1);
}
/**
 * @throws Exception If failed.
 */
@Test
public void testClientJoinDuringRotation() throws Exception {
    T2<IgniteEx, IgniteEx> nodes = startTestGrids(true);

    IgniteEx node0 = nodes.get1();
    IgniteEx node1 = nodes.get2();

    node0.cluster().state(ClusterState.ACTIVE);

    createEncryptedCache(node0, node1, cacheName(), null);

    awaitPartitionMapExchange();

    // Block node1's distributed-process response so the rotation cannot complete yet.
    TestRecordingCommunicationSpi commSpi = TestRecordingCommunicationSpi.spi(node1);

    commSpi.blockMessages((node, message) -> message instanceof SingleNodeMessage);

    IgniteFuture<Void> changeKeyFut = node0.encryption().changeCacheGroupKey(Collections.singleton(cacheName()));

    commSpi.waitForBlocked();

    // A client node must be able to join while the key change is still in progress.
    IgniteEx client = startClientGrid(getConfiguration("client"));

    assertTrue(!changeKeyFut.isDone());

    commSpi.stopBlock();

    changeKeyFut.get(MAX_AWAIT_MILLIS);

    checkEncryptedCaches(node0, client);

    checkGroupKey(CU.cacheId(cacheName()), INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);
}
/**
 * Ensures that node can join after rotation of encryption key.
 *
 * @throws Exception If failed.
 */
@Test
public void testNodeJoinAfterRotation() throws Exception {
    backups = 1;

    T2<IgniteEx, IgniteEx> nodes = startTestGrids(true);

    createEncryptedCache(nodes.get1(), nodes.get2(), cacheName(), null);

    forceCheckpoint();

    stopGrid(GRID_1);

    resetBaselineTopology();

    // Rotate the key while GRID_1 is out of topology.
    nodes.get1().encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get();

    startGrid(GRID_1);

    resetBaselineTopology();

    awaitPartitionMapExchange();

    int grpId = CU.cacheId(cacheName());

    checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

    checkEncryptedCaches(grid(GRID_0), grid(GRID_1));

    GridEncryptionManager encrMgr0 = grid(GRID_0).context().encryption();
    GridEncryptionManager encrMgr1 = grid(GRID_1).context().encryption();

    long maxWalIdx = Math.max(nodes.get1().context().cache().context().wal().currentSegment(),
        nodes.get2().context().cache().context().wal().currentSegment());

    // Notify the managers segment by segment. The previous code passed the constant
    // maxWalIdx on every iteration, which made the loop redundant after the first call.
    for (long idx = 0; idx <= maxWalIdx; idx++) {
        encrMgr0.onWalSegmentRemoved(idx);
        encrMgr1.onWalSegmentRemoved(idx);
    }

    // Once all segments are released, only the new key must remain.
    checkKeysCount(grid(GRID_1), grpId, 1, MAX_AWAIT_MILLIS);
    checkKeysCount(grid(GRID_0), grpId, 1, MAX_AWAIT_MILLIS);

    // A brand-new node joining after rotation must receive exactly the surviving key set.
    startGrid(GRID_2);

    resetBaselineTopology();

    awaitPartitionMapExchange();

    checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

    checkEncryptedCaches(grid(GRID_2), nodes.get1());

    assertEquals(encrMgr0.groupKeyIds(grpId), grid(GRID_2).context().encryption().groupKeyIds(grpId));
}
/**
 * @throws Exception If failed.
 */
@Test
public void testWrongCacheGroupSpecified() throws Exception {
    T2<IgniteEx, IgniteEx> grids = startTestGrids(true);

    IgniteEx node0 = grids.get1();
    IgniteEx node1 = grids.get2();

    // Rotation of a non-existent cache/group must be rejected.
    assertThrowsAnyCause(log,
        () -> node0.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS),
        IgniteException.class,
        "Cache group key change was rejected. Cache or group \"" + cacheName() + "\" doesn't exists");

    // Rotation of a non-encrypted cache must be rejected, even when requested from
    // a node that is filtered out by the cache's node filter.
    node0.createCache(new CacheConfiguration<>(cacheName()).setNodeFilter(node -> node.equals(node0.localNode())));

    assertThrowsAnyCause(log,
        () -> node1.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS),
        IgniteException.class,
        "Cache group key change was rejected. Cache or group \"" + cacheName() + "\" is not encrypted.");

    node0.destroyCache(cacheName());

    awaitPartitionMapExchange();

    // For a cache in a shared group the group name (not the cache name) must be specified.
    String grpName = "cacheGroup1";

    createEncryptedCache(node0, node1, cacheName(), grpName);

    assertThrowsAnyCause(log,
        () -> node0.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get(MAX_AWAIT_MILLIS),
        IgniteException.class,
        "Cache group key change was rejected. Cache or group \"" + cacheName() + "\" is a part of group \"" +
        grpName + "\". Provide group name instead of cache name for shared groups.");
}
/** @throws Exception If failed. */
@Test
public void testChangeCacheGroupKeyWithoutWAL() throws Exception {
    // With WAL disabled there is no segment history to retain old keys for.
    walMode = NONE;

    T2<IgniteEx, IgniteEx> grids = startTestGrids(true);

    createEncryptedCache(grids.get1(), grids.get2(), cacheName(), null);

    IgniteEx node0 = grids.get1();

    node0.encryption().changeCacheGroupKey(Collections.singleton(cacheName())).get();

    int grpId = CU.cacheId(cacheName());

    checkGroupKey(grpId, INITIAL_KEY_ID + 1, MAX_AWAIT_MILLIS);

    // The old key must be dropped immediately on both nodes.
    assertEquals(1, node0.context().encryption().groupKeyIds(grpId).size());
    assertEquals(1, grids.get2().context().encryption().groupKeyIds(grpId).size());
}
/**
 * Custom discovery hook to block distributed process.
 */
private static class InitMessageDiscoveryHook extends DiscoveryHook {
    /**
     * Latch to sync execution. Released by {@link #stopBlock()} to let the blocked
     * discovery thread continue.
     */
    private final CountDownLatch unlockLatch = new CountDownLatch(1);

    /**
     * Latch to sync execution. Counted down when the target message has been
     * intercepted, so the test can detect the blocked state.
     */
    private final CountDownLatch blockedLatch = new CountDownLatch(1);

    /**
     * Distributed process type.
     */
    private final DistributedProcessType type;

    /**
     * @param type Distributed process type.
     */
    private InitMessageDiscoveryHook(DistributedProcessType type) {
        this.type = type;
    }

    /** {@inheritDoc} */
    @Override public void beforeDiscovery(DiscoveryCustomMessage customMsg) {
        // Only block the InitMessage of the configured distributed process type.
        if (!(customMsg instanceof InitMessage))
            return;

        InitMessage<Serializable> msg = (InitMessage<Serializable>)customMsg;

        if (msg.type() != type.ordinal())
            return;

        try {
            blockedLatch.countDown();

            // Park the discovery thread until the test unblocks it (bounded wait).
            unlockLatch.await(MAX_AWAIT_MILLIS, TimeUnit.MILLISECONDS);
        }
        catch (InterruptedException ignore) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Waits until the target message is intercepted.
     * NOTE(review): the boolean result of {@code await} is ignored, so an elapsed
     * timeout is indistinguishable from a successful block — callers should verify.
     *
     * @param timeout Timeout in milliseconds.
     * @throws InterruptedException If interrupted.
     */
    public void waitForBlocked(long timeout) throws InterruptedException {
        blockedLatch.await(timeout, TimeUnit.MILLISECONDS);
    }

    /** Releases the blocked discovery thread. */
    public void stopBlock() {
        unlockLatch.countDown();
    }
}
}
|
apache/pulsar | 36,302 | pulsar-client/src/main/java/org/apache/pulsar/client/impl/TransactionMetaStoreHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import com.google.common.annotations.VisibleForTesting;
import io.netty.buffer.ByteBuf;
import io.netty.util.Recycler;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.Timeout;
import io.netty.util.Timer;
import io.netty.util.TimerTask;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.transaction.TransactionCoordinatorClientException;
import org.apache.pulsar.client.api.transaction.TxnID;
import org.apache.pulsar.common.api.proto.BaseCommand;
import org.apache.pulsar.common.api.proto.CommandAddPartitionToTxnResponse;
import org.apache.pulsar.common.api.proto.CommandAddSubscriptionToTxnResponse;
import org.apache.pulsar.common.api.proto.CommandEndTxnResponse;
import org.apache.pulsar.common.api.proto.CommandNewTxnResponse;
import org.apache.pulsar.common.api.proto.ProtocolVersion;
import org.apache.pulsar.common.api.proto.ServerError;
import org.apache.pulsar.common.api.proto.Subscription;
import org.apache.pulsar.common.api.proto.TxnAction;
import org.apache.pulsar.common.protocol.Commands;
import org.apache.pulsar.common.util.Backoff;
import org.apache.pulsar.common.util.BackoffBuilder;
import org.apache.pulsar.common.util.collections.ConcurrentLongHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Handler for transaction meta store.
 *
 * <p>Manages the connection to a single transaction coordinator, tracks in-flight
 * requests, retries retriable failures with backoff and times out stale requests.
 */
public class TransactionMetaStoreHandler extends HandlerState
        implements ConnectionHandler.Connection, Closeable, TimerTask {

    private static final Logger LOG = LoggerFactory.getLogger(TransactionMetaStoreHandler.class);

    /** Id of the transaction coordinator this handler talks to. */
    private final long transactionCoordinatorId;

    private final ConnectionHandler connectionHandler;

    /** In-flight requests keyed by request id; entries are removed by the handle*Response callbacks. */
    private final ConcurrentLongHashMap<OpBase<?>> pendingRequests =
        ConcurrentLongHashMap.<OpBase<?>>newBuilder()
            .expectedItems(16)
            .concurrencyLevel(1)
            .build();

    /** Creation times of in-flight requests, consumed by the timeout task ({@link #run(Timeout)}). */
    private final ConcurrentLinkedQueue<RequestTime> timeoutQueue;

    protected final Timer timer;

    /** Single-threaded executor that serializes all mutations of the pending-request state. */
    private final ExecutorService internalPinnedExecutor;

    /** Creation timestamp of a pending request, used to detect operation timeouts. */
    private static class RequestTime {
        final long creationTimeMs;
        final long requestId;

        public RequestTime(long creationTime, long requestId) {
            this.creationTimeMs = creationTime;
            this.requestId = requestId;
        }
    }

    /** If true, callers block on the semaphore when the max number of pending ops is reached. */
    private final boolean blockIfReachMaxPendingOps;

    /** Bounds the number of concurrently pending operations (released in onResponse). */
    private final Semaphore semaphore;

    /** Handle of the current timeout task; re-armed by run() and cancelled on close(). */
    private Timeout requestTimeout;

    /** Completed once the handler is connected and registered, or failed terminally. */
    private final CompletableFuture<Void> connectFuture;

    /** Deadline (epoch millis) after which connection failures are no longer retried. */
    private final long lookupDeadline;

    private final AtomicInteger previousExceptionCount = new AtomicInteger();

    /**
     * @param transactionCoordinatorId Id of the coordinator to connect to.
     * @param pulsarClient Owning client (supplies timer, executor and configuration).
     * @param topic Topic used for connection lookup.
     * @param connectFuture Completed when the handler becomes ready (or fails).
     */
    public TransactionMetaStoreHandler(long transactionCoordinatorId, PulsarClientImpl pulsarClient, String topic,
                                       CompletableFuture<Void> connectFuture) {
        super(pulsarClient, topic);
        this.transactionCoordinatorId = transactionCoordinatorId;
        this.timeoutQueue = new ConcurrentLinkedQueue<>();
        this.blockIfReachMaxPendingOps = true;
        this.semaphore = new Semaphore(1000);
        // Arm the first request-timeout tick; subsequent ticks are re-armed in run().
        this.requestTimeout = pulsarClient.timer().newTimeout(this,
                pulsarClient.getConfiguration().getOperationTimeoutMs(), TimeUnit.MILLISECONDS);
        this.connectionHandler = new ConnectionHandler(
            this,
            new BackoffBuilder()
                .setInitialTime(pulsarClient.getConfiguration().getInitialBackoffIntervalNanos(), TimeUnit.NANOSECONDS)
                .setMax(pulsarClient.getConfiguration().getMaxBackoffIntervalNanos(), TimeUnit.NANOSECONDS)
                .setMandatoryStop(100, TimeUnit.MILLISECONDS)
                .create(),
            this);
        this.connectFuture = connectFuture;
        this.internalPinnedExecutor = pulsarClient.getInternalExecutorService();
        this.timer = pulsarClient.timer();
        this.lookupDeadline = System.currentTimeMillis() + client.getConfiguration().getLookupTimeoutMs();
    }
/** Kicks off the initial (re)connection attempt; completion is reported via connectFuture. */
public void start() {
    this.connectionHandler.grabCnx();
}

/**
 * Called by the connection handler when a connection attempt fails.
 *
 * @param exception Failure cause.
 * @return {@code true} to request another retry, {@code false} to stop retrying.
 */
@Override
public boolean connectionFailed(PulsarClientException exception) {
    // Give up only on non-retriable errors or once the lookup deadline has passed.
    boolean nonRetriableError = !PulsarClientException.isRetriableError(exception);
    boolean timeout = System.currentTimeMillis() > lookupDeadline;
    if (nonRetriableError || timeout) {
        exception.setPreviousExceptionCount(previousExceptionCount);
        // completeExceptionally returns false if the future was already completed;
        // in that case we fall through and keep retrying.
        if (connectFuture.completeExceptionally(exception)) {
            if (nonRetriableError) {
                LOG.error("Transaction meta handler with transaction coordinator id {} connection failed.",
                    transactionCoordinatorId, exception);
            } else {
                LOG.error("Transaction meta handler with transaction coordinator id {} connection failed after "
                    + "timeout", transactionCoordinatorId, exception);
            }
            setState(State.Failed);
            return false;
        }
    } else {
        // Retriable failure before the deadline: count it and retry.
        previousExceptionCount.getAndIncrement();
    }
    return true;
}
/**
 * Called when a connection to the coordinator's broker has been established.
 * Performs the TC handshake (protocol v19+) and re-sends pending requests.
 *
 * @param cnx The newly opened connection.
 * @return Future completed when the handshake/registration finishes.
 */
@Override
public CompletableFuture<Void> connectionOpened(ClientCnx cnx) {
    final CompletableFuture<Void> future = new CompletableFuture<>();
    internalPinnedExecutor.execute(() -> {
        LOG.info("Transaction meta handler with transaction coordinator id {} connection opened.",
            transactionCoordinatorId);
        State state = getState();
        if (state == State.Closing || state == State.Closed) {
            // Handler was closed while connecting: fail everything still pending.
            setState(State.Closed);
            failPendingRequest();
            future.complete(null);
            return;
        }
        // if broker protocol version < 19, don't send TcClientConnectRequest to broker.
        if (cnx.getRemoteEndpointProtocolVersion() > ProtocolVersion.v18.getValue()) {
            long requestId = client.newRequestId();
            ByteBuf request = Commands.newTcClientConnectRequest(transactionCoordinatorId, requestId);
            cnx.sendRequestWithId(request, requestId).thenRun(() -> {
                internalPinnedExecutor.execute(() -> {
                    LOG.info("Transaction coordinator client connect success! tcId : {}", transactionCoordinatorId);
                    if (registerToConnection(cnx)) {
                        this.connectionHandler.resetBackoff();
                        // Re-send every request that was pending while we were disconnected.
                        pendingRequests.forEach((requestID, opBase) -> checkStateAndSendRequest(opBase));
                    }
                    future.complete(null);
                });
            }).exceptionally((e) -> {
                internalPinnedExecutor.execute(() -> {
                    LOG.error("Transaction coordinator client connect fail! tcId : {}",
                        transactionCoordinatorId, e.getCause());
                    // NotAllowedException is terminal; otherwise let the caller retry.
                    if (getState() == State.Closing || getState() == State.Closed
                            || e.getCause() instanceof PulsarClientException.NotAllowedException) {
                        setState(State.Closed);
                        cnx.channel().close();
                        future.complete(null);
                    } else {
                        future.completeExceptionally(e.getCause());
                    }
                });
                return null;
            });
        } else {
            // Old broker: skip the handshake and register directly.
            LOG.warn("Can not connect to the transaction coordinator because the protocol version {} is "
                + "lower than 19", cnx.getRemoteEndpointProtocolVersion());
            registerToConnection(cnx);
            future.complete(null);
        }
    });
    return future;
}

/**
 * Transitions the handler to Ready and registers it on the given connection.
 *
 * @param cnx Connection to register on.
 * @return {@code true} on success; {@code false} if the state change failed
 *         (the connection is closed and connectFuture is failed).
 */
private boolean registerToConnection(ClientCnx cnx) {
    if (changeToReadyState()) {
        connectionHandler.setClientCnx(cnx);
        cnx.registerTransactionMetaStoreHandler(transactionCoordinatorId, this);
        connectFuture.complete(null);
        return true;
    } else {
        State state = getState();
        cnx.channel().close();
        connectFuture.completeExceptionally(
            new IllegalStateException("Failed to change the state from " + state + " to Ready"));
        return false;
    }
}
/** Fails every pending request with AlreadyClosedException and clears the map. */
private void failPendingRequest() {
    // this method is executed in internalPinnedExecutor.
    pendingRequests.forEach((k, op) -> {
        if (op != null && !op.callback.isDone()) {
            op.callback.completeExceptionally(new PulsarClientException.AlreadyClosedException(
                "Could not get response from transaction meta store when "
                    + "the transaction meta store has already close."));
            onResponse(op);
        }
    });
    this.pendingRequests.clear();
}

/**
 * Opens a new transaction on the coordinator.
 *
 * @param timeout Transaction timeout value.
 * @param unit Unit of the timeout.
 * @return Future completed with the new transaction id (or the failure).
 */
public CompletableFuture<TxnID> newTransactionAsync(long timeout, TimeUnit unit) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("New transaction with timeout in ms {}", unit.toMillis(timeout));
    }
    CompletableFuture<TxnID> callback = new CompletableFuture<>();
    // Acquire a pending-ops permit; fails the callback if it cannot be obtained.
    if (!canSendRequest(callback)) {
        return callback;
    }
    long requestId = client.newRequestId();
    ByteBuf cmd = Commands.newTxn(transactionCoordinatorId, requestId, unit.toMillis(timeout));
    String description = String.format("Create new transaction %s", transactionCoordinatorId);
    OpForTxnIdCallBack op = OpForTxnIdCallBack.create(cmd, callback, client, description, cnx());
    internalPinnedExecutor.execute(() -> {
        // Register before sending so the response handler can find the op.
        pendingRequests.put(requestId, op);
        timeoutQueue.add(new RequestTime(System.currentTimeMillis(), requestId));
        if (!checkStateAndSendRequest(op)) {
            pendingRequests.remove(requestId);
        }
    });
    return callback;
}
/**
 * Handles the broker's response to a NEW_TXN request: completes the matching
 * pending op, or re-schedules it with backoff on a retriable coordinator error.
 */
void handleNewTxnResponse(CommandNewTxnResponse response) {
    // Snapshot response fields before hopping to the pinned executor,
    // since the protobuf object may be reused by the caller.
    final boolean hasError = response.hasError();
    final ServerError error;
    final String message;
    if (hasError) {
        error = response.getError();
        message = response.getMessage();
    } else {
        error = null;
        message = null;
    }
    final TxnID txnID = new TxnID(response.getTxnidMostBits(), response.getTxnidLeastBits());
    final long requestId = response.getRequestId();
    internalPinnedExecutor.execute(() -> {
        OpForTxnIdCallBack op = (OpForTxnIdCallBack) pendingRequests.remove(requestId);
        if (op == null) {
            // Request already timed out (or was never registered); nothing to complete.
            if (LOG.isDebugEnabled()) {
                LOG.debug("Got new txn response for transaction {}", txnID);
            }
            return;
        }
        if (!hasError) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Got new txn response {} for request {}", txnID, requestId);
            }
            op.callback.complete(txnID);
        } else {
            // TransactionCoordinatorNotFound is retriable: re-register the op and
            // retry after the op's backoff delay.
            if (checkIfNeedRetryByError(error, message, op)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Get a response for the {} request {} error "
                            + "TransactionCoordinatorNotFound and try it again",
                        BaseCommand.Type.NEW_TXN.name(), requestId);
                }
                pendingRequests.put(requestId, op);
                timer.newTimeout(timeout -> {
                    internalPinnedExecutor.execute(() -> {
                        if (!pendingRequests.containsKey(requestId)) {
                            // Timed out while waiting for the retry delay.
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("The request {} already timeout", requestId);
                            }
                            return;
                        }
                        if (!checkStateAndSendRequest(op)) {
                            pendingRequests.remove(requestId);
                        }
                    });
                }, op.backoff.next(), TimeUnit.MILLISECONDS);
                return;
            }
            LOG.error("Got {} for request {} error {}", BaseCommand.Type.NEW_TXN.name(),
                requestId, error);
        }
        onResponse(op);
    });
}
/**
 * Registers the given publish partitions with a transaction.
 *
 * @param txnID Transaction id.
 * @param partitions Topic partitions the transaction will publish to.
 * @return Future completed when the coordinator acknowledges the registration.
 */
public CompletableFuture<Void> addPublishPartitionToTxnAsync(TxnID txnID, List<String> partitions) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Add publish partition {} to txn {}", partitions, txnID);
    }
    CompletableFuture<Void> callback = new CompletableFuture<>();
    if (!canSendRequest(callback)) {
        return callback;
    }
    long requestId = client.newRequestId();
    ByteBuf cmd = Commands.newAddPartitionToTxn(
        requestId, txnID.getLeastSigBits(), txnID.getMostSigBits(), partitions);
    String description = String.format("Add partition %s to TXN %s", String.valueOf(partitions),
        String.valueOf(txnID));
    OpForVoidCallBack op = OpForVoidCallBack
        .create(cmd, callback, client, description, cnx());
    internalPinnedExecutor.execute(() -> {
        // Register before sending so the response handler can find the op.
        pendingRequests.put(requestId, op);
        timeoutQueue.add(new RequestTime(System.currentTimeMillis(), requestId));
        if (!checkStateAndSendRequest(op)) {
            pendingRequests.remove(requestId);
        }
    });
    return callback;
}

/**
 * Handles the broker's ADD_PARTITION_TO_TXN response: completes the matching
 * pending op, or re-schedules it with backoff on a retriable coordinator error.
 */
void handleAddPublishPartitionToTxnResponse(CommandAddPartitionToTxnResponse response) {
    // Snapshot response fields before hopping to the pinned executor.
    final boolean hasError = response.hasError();
    final ServerError error;
    final String message;
    if (hasError) {
        error = response.getError();
        message = response.getMessage();
    } else {
        error = null;
        message = null;
    }
    final TxnID txnID = new TxnID(response.getTxnidMostBits(), response.getTxnidLeastBits());
    final long requestId = response.getRequestId();
    internalPinnedExecutor.execute(() -> {
        OpForVoidCallBack op = (OpForVoidCallBack) pendingRequests.remove(requestId);
        if (op == null) {
            // Request already timed out; nothing to complete.
            if (LOG.isDebugEnabled()) {
                LOG.debug("Got add publish partition to txn response for transaction {}", txnID);
            }
            return;
        }
        if (!hasError) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Add publish partition for request {} success.", requestId);
            }
            op.callback.complete(null);
        } else {
            // Retriable coordinator-not-found: re-register and retry after backoff.
            if (checkIfNeedRetryByError(error, message, op)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Get a response for the {} request {} "
                            + " error TransactionCoordinatorNotFound and try it again",
                        BaseCommand.Type.ADD_PARTITION_TO_TXN.name(), requestId);
                }
                pendingRequests.put(requestId, op);
                timer.newTimeout(timeout -> {
                    internalPinnedExecutor.execute(() -> {
                        if (!pendingRequests.containsKey(requestId)) {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("The request {} already timeout", requestId);
                            }
                            return;
                        }
                        if (!checkStateAndSendRequest(op)) {
                            pendingRequests.remove(requestId);
                        }
                    });
                }, op.backoff.next(), TimeUnit.MILLISECONDS);
                return;
            }
            LOG.error("{} for request {}, transaction {}, error: {}",
                BaseCommand.Type.ADD_PARTITION_TO_TXN.name(), requestId, txnID, error);
        }
        onResponse(op);
    });
}
/**
 * Registers the given acknowledged subscriptions with a transaction.
 *
 * @param txnID Transaction id.
 * @param subscriptionList Subscriptions the transaction will acknowledge on.
 * @return Future completed when the coordinator acknowledges the registration.
 */
public CompletableFuture<Void> addSubscriptionToTxn(TxnID txnID, List<Subscription> subscriptionList) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Add subscription {} to txn {}.", subscriptionList, txnID);
    }
    CompletableFuture<Void> callback = new CompletableFuture<>();
    if (!canSendRequest(callback)) {
        return callback;
    }
    long requestId = client.newRequestId();
    ByteBuf cmd = Commands.newAddSubscriptionToTxn(
        requestId, txnID.getLeastSigBits(), txnID.getMostSigBits(), subscriptionList);
    String description = String.format("Add subscription %s to TXN %s", toStringSubscriptionList(subscriptionList),
        String.valueOf(txnID));
    OpForVoidCallBack op = OpForVoidCallBack.create(cmd, callback, client, description, cnx());
    internalPinnedExecutor.execute(() -> {
        // Register before sending so the response handler can find the op.
        pendingRequests.put(requestId, op);
        timeoutQueue.add(new RequestTime(System.currentTimeMillis(), requestId));
        if (!checkStateAndSendRequest(op)) {
            pendingRequests.remove(requestId);
        }
    });
    return callback;
}
/**
 * Renders a subscription list as "[topic1 sub1, topic2 sub2]" for use in
 * operation descriptions and error messages.
 *
 * <p>Fix: entries are now separated by ", " — previously they were concatenated
 * with no delimiter, producing unreadable output like "[t1 s1t2 s2]".
 *
 * @param list Subscriptions to render; may be {@code null} or empty.
 * @return Human-readable representation, "[]" when the list is null/empty.
 */
private String toStringSubscriptionList(List<Subscription> list) {
    if (list == null || list.isEmpty()) {
        return "[]";
    }
    StringBuilder builder = new StringBuilder("[");
    String separator = "";
    for (Subscription subscription : list) {
        builder.append(separator)
            .append(String.format("%s %s", subscription.getTopic(), subscription.getSubscription()));
        separator = ", ";
    }
    return builder.append("]").toString();
}
/**
 * Handles the broker's ADD_SUBSCRIPTION_TO_TXN response: completes the matching
 * pending op, or re-schedules it with backoff on a retriable coordinator error.
 *
 * <p>Fix: previously an error was logged <em>before</em> the retry check, so every
 * retriable TransactionCoordinatorNotFound was logged as an error and terminal
 * failures were logged twice. The error is now logged only on the non-retry path,
 * consistent with the other response handlers.
 */
public void handleAddSubscriptionToTxnResponse(CommandAddSubscriptionToTxnResponse response) {
    // Snapshot response fields before hopping to the pinned executor.
    final boolean hasError = response.hasError();
    final ServerError error;
    final String message;
    if (hasError) {
        error = response.getError();
        message = response.getMessage();
    } else {
        error = null;
        message = null;
    }
    final long requestId = response.getRequestId();
    final TxnID txnID = new TxnID(response.getTxnidMostBits(), response.getTxnidLeastBits());
    internalPinnedExecutor.execute(() -> {
        OpForVoidCallBack op = (OpForVoidCallBack) pendingRequests.remove(requestId);
        if (op == null) {
            // Request already timed out; nothing to complete.
            if (LOG.isDebugEnabled()) {
                LOG.debug("Add subscription to txn timeout for request {}.", requestId);
            }
            return;
        }
        if (!hasError) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Add subscription to txn success for request {}.", requestId);
            }
            op.callback.complete(null);
        } else {
            // Retriable coordinator-not-found: re-register and retry after backoff.
            if (checkIfNeedRetryByError(error, message, op)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Get a response for {} request {} error TransactionCoordinatorNotFound and try it"
                        + " again", BaseCommand.Type.ADD_SUBSCRIPTION_TO_TXN.name(), requestId);
                }
                pendingRequests.put(requestId, op);
                timer.newTimeout(timeout -> {
                    internalPinnedExecutor.execute(() -> {
                        if (!pendingRequests.containsKey(requestId)) {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("The request {} already timeout", requestId);
                            }
                            return;
                        }
                        if (!checkStateAndSendRequest(op)) {
                            pendingRequests.remove(requestId);
                        }
                    });
                }, op.backoff.next(), TimeUnit.MILLISECONDS);
                return;
            }
            LOG.error("Add subscription to txn failed for request {}, transaction {}, error: {}",
                requestId, txnID, error);
        }
        onResponse(op);
    });
}
/**
 * Commits or aborts a transaction on the coordinator.
 *
 * @param txnID Transaction id.
 * @param action COMMIT or ABORT.
 * @return Future completed when the coordinator acknowledges the end of the transaction.
 */
public CompletableFuture<Void> endTxnAsync(TxnID txnID, TxnAction action) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("End txn {}, action {}", txnID, action);
    }
    CompletableFuture<Void> callback = new CompletableFuture<>();
    if (!canSendRequest(callback)) {
        return callback;
    }
    long requestId = client.newRequestId();
    BaseCommand cmd = Commands.newEndTxn(requestId, txnID.getLeastSigBits(), txnID.getMostSigBits(), action);
    ByteBuf buf = Commands.serializeWithSize(cmd);
    String description = String.format("End [%s] TXN %s", String.valueOf(action), String.valueOf(txnID));
    OpForVoidCallBack op = OpForVoidCallBack.create(buf, callback, client, description, cnx());
    internalPinnedExecutor.execute(() -> {
        // Register before sending so the response handler can find the op.
        pendingRequests.put(requestId, op);
        timeoutQueue.add(new RequestTime(System.currentTimeMillis(), requestId));
        if (!checkStateAndSendRequest(op)) {
            pendingRequests.remove(requestId);
        }
    });
    return callback;
}

/**
 * Handles the broker's END_TXN response: completes the matching pending op,
 * or re-schedules it with backoff on a retriable coordinator error.
 */
void handleEndTxnResponse(CommandEndTxnResponse response) {
    // Snapshot response fields before hopping to the pinned executor.
    final boolean hasError = response.hasError();
    final ServerError error;
    final String message;
    if (hasError) {
        error = response.getError();
        message = response.getMessage();
    } else {
        error = null;
        message = null;
    }
    final TxnID txnID = new TxnID(response.getTxnidMostBits(), response.getTxnidLeastBits());
    final long requestId = response.getRequestId();
    internalPinnedExecutor.execute(() -> {
        OpForVoidCallBack op = (OpForVoidCallBack) pendingRequests.remove(requestId);
        if (op == null) {
            // Request already timed out; nothing to complete.
            if (LOG.isDebugEnabled()) {
                LOG.debug("Got end txn response for transaction but no requests pending for txn {}", txnID);
            }
            return;
        }
        if (!hasError) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Got end txn response success for request {}, txn {}", requestId, txnID);
            }
            op.callback.complete(null);
        } else {
            // Retriable coordinator-not-found: re-register and retry after backoff.
            if (checkIfNeedRetryByError(error, message, op)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Get a response for the {} request {} error "
                            + "TransactionCoordinatorNotFound and try it again",
                        BaseCommand.Type.END_TXN.name(), requestId);
                }
                pendingRequests.put(requestId, op);
                timer.newTimeout(timeout -> {
                    internalPinnedExecutor.execute(() -> {
                        if (!pendingRequests.containsKey(requestId)) {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("The request {} already timeout", requestId);
                            }
                            return;
                        }
                        if (!checkStateAndSendRequest(op)) {
                            pendingRequests.remove(requestId);
                        }
                    });
                }, op.backoff.next(), TimeUnit.MILLISECONDS);
                return;
            }
            LOG.error("Got {} response for request {}, transaction {}, error: {}",
                BaseCommand.Type.END_TXN.name(), requestId, txnID, error);
        }
        onResponse(op);
    });
}
/**
 * Decides whether a failed request should be retried.
 *
 * <p>Only {@code TransactionCoordinatorNotFound} is retriable: in that case a
 * reconnect is scheduled (unless already connecting) and {@code true} is returned.
 * Any other error terminally fails the op's callback and returns {@code false}.
 *
 * @param error Server error from the response.
 * @param message Error message from the response.
 * @param op Pending op to fail on non-retriable errors (may be {@code null}).
 * @return {@code true} if the caller should retry the request.
 */
private boolean checkIfNeedRetryByError(ServerError error, String message, OpBase<?> op) {
    if (error != ServerError.TransactionCoordinatorNotFound) {
        // Non-retriable: surface the error to the caller immediately.
        if (op != null) {
            op.callback.completeExceptionally(getExceptionByServerError(error, message));
        }
        return false;
    }
    // Coordinator moved/unavailable: trigger a reconnect unless one is already underway.
    if (getState() != State.Connecting) {
        connectionHandler.reconnectLater(
            new TransactionCoordinatorClientException.CoordinatorNotFoundException(message));
    }
    return true;
}
/**
 * Base of a pooled (Netty-recycled) pending operation: the serialized command,
 * the caller's callback, the per-op retry backoff and debugging context.
 */
private abstract static class OpBase<T> {
    protected ByteBuf cmd;
    protected CompletableFuture<T> callback;
    protected Backoff backoff;
    protected String description;
    protected ClientCnx clientCnx;

    /** Clears all fields and returns the instance to its recycler pool. */
    abstract void recycle();
}

/** Pending op whose callback yields the new {@link TxnID} (NEW_TXN requests). */
private static class OpForTxnIdCallBack extends OpBase<TxnID> {

    static OpForTxnIdCallBack create(ByteBuf cmd, CompletableFuture<TxnID> callback, PulsarClientImpl client,
                                     String description, ClientCnx clientCnx) {
        OpForTxnIdCallBack op = RECYCLER.get();
        op.callback = callback;
        op.cmd = cmd;
        // Per-op retry backoff, capped at a tenth of the client's max backoff.
        op.backoff = new BackoffBuilder()
            .setInitialTime(client.getConfiguration().getInitialBackoffIntervalNanos(),
                TimeUnit.NANOSECONDS)
            .setMax(client.getConfiguration().getMaxBackoffIntervalNanos() / 10, TimeUnit.NANOSECONDS)
            .setMandatoryStop(0, TimeUnit.MILLISECONDS)
            .create();
        op.description = description;
        op.clientCnx = clientCnx;
        return op;
    }

    private OpForTxnIdCallBack(Recycler.Handle<OpForTxnIdCallBack> recyclerHandle) {
        this.recyclerHandle = recyclerHandle;
    }

    /** {@inheritDoc} */
    @Override
    void recycle() {
        // Null out references so the pooled instance does not pin them.
        this.backoff = null;
        this.cmd = null;
        this.callback = null;
        this.description = null;
        this.clientCnx = null;
        recyclerHandle.recycle(this);
    }

    private final Recycler.Handle<OpForTxnIdCallBack> recyclerHandle;

    private static final Recycler<OpForTxnIdCallBack> RECYCLER = new Recycler<OpForTxnIdCallBack>() {
        @Override
        protected OpForTxnIdCallBack newObject(Handle<OpForTxnIdCallBack> handle) {
            return new OpForTxnIdCallBack(handle);
        }
    };
}

/** Pending op whose callback yields no value (add-partition/add-subscription/end-txn). */
private static class OpForVoidCallBack extends OpBase<Void> {

    static OpForVoidCallBack create(ByteBuf cmd, CompletableFuture<Void> callback, PulsarClientImpl client,
                                    String description, ClientCnx clientCnx) {
        OpForVoidCallBack op = RECYCLER.get();
        op.callback = callback;
        op.cmd = cmd;
        // Per-op retry backoff, capped at a tenth of the client's max backoff.
        op.backoff = new BackoffBuilder()
            .setInitialTime(client.getConfiguration().getInitialBackoffIntervalNanos(),
                TimeUnit.NANOSECONDS)
            .setMax(client.getConfiguration().getMaxBackoffIntervalNanos() / 10, TimeUnit.NANOSECONDS)
            .setMandatoryStop(0, TimeUnit.MILLISECONDS)
            .create();
        op.description = description;
        op.clientCnx = clientCnx;
        return op;
    }

    private OpForVoidCallBack(Recycler.Handle<OpForVoidCallBack> recyclerHandle) {
        this.recyclerHandle = recyclerHandle;
    }

    /** {@inheritDoc} */
    @Override
    void recycle() {
        // Null out references so the pooled instance does not pin them.
        this.backoff = null;
        this.cmd = null;
        this.callback = null;
        this.description = null;
        this.clientCnx = null;
        recyclerHandle.recycle(this);
    }

    private final Recycler.Handle<OpForVoidCallBack> recyclerHandle;

    private static final Recycler<OpForVoidCallBack> RECYCLER = new Recycler<OpForVoidCallBack>() {
        @Override
        protected OpForVoidCallBack newObject(Handle<OpForVoidCallBack> handle) {
            return new OpForVoidCallBack(handle);
        }
    };
}
/**
 * Maps a protocol-level {@link ServerError} onto the matching
 * {@link TransactionCoordinatorClientException} subtype.
 *
 * @param serverError Error code received from the broker.
 * @param msg Error message to wrap.
 * @return The most specific exception subtype for the error, or the generic
 *         {@code TransactionCoordinatorClientException} for unmapped codes.
 */
public static TransactionCoordinatorClientException getExceptionByServerError(ServerError serverError, String msg) {
    if (serverError == ServerError.TransactionCoordinatorNotFound) {
        return new TransactionCoordinatorClientException.CoordinatorNotFoundException(msg);
    }
    if (serverError == ServerError.InvalidTxnStatus) {
        return new TransactionCoordinatorClientException.InvalidTxnStatusException(msg);
    }
    if (serverError == ServerError.TransactionNotFound) {
        return new TransactionCoordinatorClientException.TransactionNotFoundException(msg);
    }
    return new TransactionCoordinatorClientException(msg);
}
/** Releases the resources of a finished op: the command buffer, the pooled op, and a semaphore permit. */
private void onResponse(OpBase<?> op) {
    ReferenceCountUtil.safeRelease(op.cmd);
    op.recycle();
    semaphore.release();
}

/**
 * Acquires a pending-ops permit; fails {@code callback} and returns {@code false}
 * if the permit cannot be obtained.
 */
private boolean canSendRequest(CompletableFuture<?> callback) {
    try {
        if (blockIfReachMaxPendingOps) {
            // Block the caller until a permit becomes available.
            semaphore.acquire();
        } else {
            if (!semaphore.tryAcquire()) {
                callback.completeExceptionally(new TransactionCoordinatorClientException("Reach max pending ops."));
                return false;
            }
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag and report the failure to the caller.
        Thread.currentThread().interrupt();
        callback.completeExceptionally(TransactionCoordinatorClientException.unwrap(e));
        return false;
    }
    return true;
}

/**
 * Sends the op's command if the handler is Ready; keeps it pending while Connecting;
 * terminally fails it otherwise.
 *
 * @return {@code true} if the op remains pending (sent or queued for reconnect);
 *         {@code false} if it was failed and released.
 */
private boolean checkStateAndSendRequest(OpBase<?> op) {
    switch (getState()) {
        case Ready:
            ClientCnx cnx = cnx();
            if (cnx != null) {
                // Retain: the channel write releases the buffer, but the op may be re-sent on retry.
                op.cmd.retain();
                // NOTE(review): the promise comes from a second cnx() call, which could
                // in principle return a different connection than the local variable — verify.
                cnx.ctx().writeAndFlush(op.cmd, cnx().ctx().voidPromise());
            } else {
                LOG.error("The cnx was null when the TC handler was ready", new NullPointerException());
            }
            return true;
        case Connecting:
            // Keep the op pending; it is re-sent from connectionOpened().
            return true;
        case Closing:
        case Closed:
            op.callback.completeExceptionally(
                new TransactionCoordinatorClientException.MetaStoreHandlerNotReadyException(
                    "Transaction meta store handler for tcId "
                        + transactionCoordinatorId
                        + " is closing or closed."));
            onResponse(op);
            return false;
        case Failed:
        case Uninitialized:
            op.callback.completeExceptionally(
                new TransactionCoordinatorClientException.MetaStoreHandlerNotReadyException(
                    "Transaction meta store handler for tcId "
                        + transactionCoordinatorId
                        + " not connected."));
            onResponse(op);
            return false;
        default:
            op.callback.completeExceptionally(
                new TransactionCoordinatorClientException.MetaStoreHandlerNotReadyException(
                    transactionCoordinatorId));
            onResponse(op);
            return false;
    }
}
/**
 * Timer-task callback that enforces the client operation timeout on pending
 * transaction-coordinator requests, then re-arms itself.
 *
 * <p>All work is hopped onto {@code internalPinnedExecutor} so that timeout
 * handling is serialized with the handler's other state transitions.
 */
@Override
public void run(Timeout timeout) throws Exception {
internalPinnedExecutor.execute(() -> {
// A cancelled timer fires once more on cancellation; ignore it.
if (timeout.isCancelled()) {
return;
}
long timeToWaitMs;
// Handler is shutting down: stop without re-arming the timer.
if (getState() == State.Closing || getState() == State.Closed) {
return;
}
// Drain every queued request whose age exceeds the configured operation
// timeout, failing its callback. The loop stops at the first entry that
// has not yet expired (entries are enqueued in creation order).
RequestTime peeked = timeoutQueue.peek();
while (peeked != null && peeked.creationTimeMs + client.getConfiguration().getOperationTimeoutMs()
- System.currentTimeMillis() <= 0) {
RequestTime lastPolled = timeoutQueue.poll();
if (lastPolled != null) {
OpBase<?> op = pendingRequests.remove(lastPolled.requestId);
// Only fail ops that are still pending and not already completed.
if (op != null && !op.callback.isDone()) {
op.callback.completeExceptionally(new PulsarClientException.TimeoutException(
String.format("%s failed due to timeout. connection: %s. pending-queue: %s",
op.description, op.clientCnx, pendingRequests.size())));
if (LOG.isDebugEnabled()) {
LOG.debug("Transaction coordinator request {} is timeout.", lastPolled.requestId);
}
onResponse(op);
}
} else {
// peek() saw an entry but poll() returned null — presumably the
// queue was drained concurrently; stop this sweep.
break;
}
peeked = timeoutQueue.peek();
}
// Re-arm: wait a full timeout interval when the queue is empty (or the
// head is already expired), otherwise wait until the head would expire.
if (peeked == null) {
timeToWaitMs = client.getConfiguration().getOperationTimeoutMs();
} else {
long diff = (peeked.creationTimeMs + client.getConfiguration().getOperationTimeoutMs())
- System.currentTimeMillis();
if (diff <= 0) {
timeToWaitMs = client.getConfiguration().getOperationTimeoutMs();
} else {
timeToWaitMs = diff;
}
}
requestTimeout = client.timer().newTimeout(this, timeToWaitMs, TimeUnit.MILLISECONDS);
});
}
/**
 * Returns the currently active broker connection from the connection
 * handler; may be {@code null} when not connected (callers null-check).
 */
private ClientCnx cnx() {
return this.connectionHandler.cnx();
}
/**
 * Forwards a connection-closed notification for {@code cnx} to the
 * underlying connection handler (which owns reconnect logic).
 */
void connectionClosed(ClientCnx cnx) {
this.connectionHandler.connectionClosed(cnx);
}
/**
 * Closes this handler: cancels the request-timeout timer and transitions
 * the handler state to {@code Closed}.
 *
 * <p>NOTE(review): requests still in {@code pendingRequests} are not failed
 * here — presumably they are rejected by {@code checkStateAndSendRequest}
 * or the timeout sweep once the state is Closed; confirm no ops can leak.
 */
@Override
public void close() throws IOException {
this.requestTimeout.cancel();
this.setState(State.Closed);
}
/** Exposes the handler's connection state for tests only. */
@VisibleForTesting
public State getConnectHandleState() {
return getState();
}
/**
 * Human-readable name of this handler, including the coordinator id it
 * serves; used for logging/diagnostics.
 */
@Override
public String getHandlerName() {
    return new StringBuilder("Transaction meta store handler [")
            .append(transactionCoordinatorId)
            .append(']')
            .toString();
}
}
|
googleapis/google-cloud-java | 36,110 | java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/Metric.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1alpha/data.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;
/**
*
*
* <pre>
* The quantitative measurements of a report. For example, the metric
* `eventCount` is the total number of events. Requests are allowed up to 10
* metrics.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.Metric}
*/
public final class Metric extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.Metric)
MetricOrBuilder {
private static final long serialVersionUID = 0L;
// Use Metric.newBuilder() to construct.
private Metric(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Metric() {
name_ = "";
expression_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Metric();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_Metric_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_Metric_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.data.v1alpha.Metric.class,
com.google.analytics.data.v1alpha.Metric.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int EXPRESSION_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object expression_ = "";
/**
*
*
* <pre>
* A mathematical expression for derived metrics. For example, the metric
* Event count per user is `eventCount/totalUsers`.
* </pre>
*
* <code>string expression = 2;</code>
*
* @return The expression.
*/
@java.lang.Override
public java.lang.String getExpression() {
java.lang.Object ref = expression_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
expression_ = s;
return s;
}
}
/**
*
*
* <pre>
* A mathematical expression for derived metrics. For example, the metric
* Event count per user is `eventCount/totalUsers`.
* </pre>
*
* <code>string expression = 2;</code>
*
* @return The bytes for expression.
*/
@java.lang.Override
public com.google.protobuf.ByteString getExpressionBytes() {
java.lang.Object ref = expression_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
expression_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INVISIBLE_FIELD_NUMBER = 3;
private boolean invisible_ = false;
/**
*
*
* <pre>
* Indicates if a metric is invisible in the report response. If a metric is
* invisible, the metric will not produce a column in the response, but can be
* used in `metricFilter`, `orderBys`, or a metric `expression`.
* </pre>
*
* <code>bool invisible = 3;</code>
*
* @return The invisible.
*/
@java.lang.Override
public boolean getInvisible() {
return invisible_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, expression_);
}
if (invisible_ != false) {
output.writeBool(3, invisible_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, expression_);
}
if (invisible_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, invisible_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.data.v1alpha.Metric)) {
return super.equals(obj);
}
com.google.analytics.data.v1alpha.Metric other = (com.google.analytics.data.v1alpha.Metric) obj;
if (!getName().equals(other.getName())) return false;
if (!getExpression().equals(other.getExpression())) return false;
if (getInvisible() != other.getInvisible()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
hash = (53 * hash) + getExpression().hashCode();
hash = (37 * hash) + INVISIBLE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getInvisible());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.Metric parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.Metric parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.Metric parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.analytics.data.v1alpha.Metric prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The quantitative measurements of a report. For example, the metric
* `eventCount` is the total number of events. Requests are allowed up to 10
* metrics.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.Metric}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.Metric)
com.google.analytics.data.v1alpha.MetricOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_Metric_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_Metric_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.data.v1alpha.Metric.class,
com.google.analytics.data.v1alpha.Metric.Builder.class);
}
// Construct using com.google.analytics.data.v1alpha.Metric.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
expression_ = "";
invisible_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_Metric_descriptor;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.Metric getDefaultInstanceForType() {
return com.google.analytics.data.v1alpha.Metric.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.data.v1alpha.Metric build() {
com.google.analytics.data.v1alpha.Metric result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.Metric buildPartial() {
com.google.analytics.data.v1alpha.Metric result =
new com.google.analytics.data.v1alpha.Metric(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.analytics.data.v1alpha.Metric result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.expression_ = expression_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.invisible_ = invisible_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.data.v1alpha.Metric) {
return mergeFrom((com.google.analytics.data.v1alpha.Metric) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.analytics.data.v1alpha.Metric other) {
if (other == com.google.analytics.data.v1alpha.Metric.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getExpression().isEmpty()) {
expression_ = other.expression_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getInvisible() != false) {
setInvisible(other.getInvisible());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
expression_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
invisible_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object expression_ = "";
/**
*
*
* <pre>
* A mathematical expression for derived metrics. For example, the metric
* Event count per user is `eventCount/totalUsers`.
* </pre>
*
* <code>string expression = 2;</code>
*
* @return The expression.
*/
public java.lang.String getExpression() {
java.lang.Object ref = expression_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
expression_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A mathematical expression for derived metrics. For example, the metric
* Event count per user is `eventCount/totalUsers`.
* </pre>
*
* <code>string expression = 2;</code>
*
* @return The bytes for expression.
*/
public com.google.protobuf.ByteString getExpressionBytes() {
java.lang.Object ref = expression_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
expression_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A mathematical expression for derived metrics. For example, the metric
* Event count per user is `eventCount/totalUsers`.
* </pre>
*
* <code>string expression = 2;</code>
*
* @param value The expression to set.
* @return This builder for chaining.
*/
public Builder setExpression(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
expression_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A mathematical expression for derived metrics. For example, the metric
* Event count per user is `eventCount/totalUsers`.
* </pre>
*
* <code>string expression = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearExpression() {
expression_ = getDefaultInstance().getExpression();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A mathematical expression for derived metrics. For example, the metric
* Event count per user is `eventCount/totalUsers`.
* </pre>
*
* <code>string expression = 2;</code>
*
* @param value The bytes for expression to set.
* @return This builder for chaining.
*/
public Builder setExpressionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
expression_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private boolean invisible_;
/**
*
*
* <pre>
* Indicates if a metric is invisible in the report response. If a metric is
* invisible, the metric will not produce a column in the response, but can be
* used in `metricFilter`, `orderBys`, or a metric `expression`.
* </pre>
*
* <code>bool invisible = 3;</code>
*
* @return The invisible.
*/
@java.lang.Override
public boolean getInvisible() {
return invisible_;
}
/**
*
*
* <pre>
* Indicates if a metric is invisible in the report response. If a metric is
* invisible, the metric will not produce a column in the response, but can be
* used in `metricFilter`, `orderBys`, or a metric `expression`.
* </pre>
*
* <code>bool invisible = 3;</code>
*
* @param value The invisible to set.
* @return This builder for chaining.
*/
public Builder setInvisible(boolean value) {
invisible_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates if a metric is invisible in the report response. If a metric is
* invisible, the metric will not produce a column in the response, but can be
* used in `metricFilter`, `orderBys`, or a metric `expression`.
* </pre>
*
* <code>bool invisible = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearInvisible() {
bitField0_ = (bitField0_ & ~0x00000004);
invisible_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.Metric)
}
// @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.Metric)
private static final com.google.analytics.data.v1alpha.Metric DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.Metric();
}
public static com.google.analytics.data.v1alpha.Metric getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser for Metric. On any failure the partially-built message is
// attached to the thrown InvalidProtocolBufferException so callers can inspect
// what was decoded before the error.
private static final com.google.protobuf.Parser<Metric> PARSER =
    new com.google.protobuf.AbstractParser<Metric>() {
      @java.lang.Override
      public Metric parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // I/O errors are wrapped so the parser's checked-exception contract
          // stays InvalidProtocolBufferException only.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<Metric> parser() {
  return PARSER;
}
@java.lang.Override
// Instance-level parser accessor required by the Message interface.
public com.google.protobuf.Parser<Metric> getParserForType() {
  return PARSER;
}
@java.lang.Override
// Instance-level default accessor required by the Message interface.
public com.google.analytics.data.v1alpha.Metric getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/gateway.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
*
*
* <pre>
* Request used by the CreateGateway method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.CreateGatewayRequest}
*/
public final class CreateGatewayRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.CreateGatewayRequest)
    CreateGatewayRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateGatewayRequest.newBuilder() to construct.
  private CreateGatewayRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private CreateGatewayRequest() {
    parent_ = "";
    gatewayId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateGatewayRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkservices.v1.GatewayProto
        .internal_static_google_cloud_networkservices_v1_CreateGatewayRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkservices.v1.GatewayProto
        .internal_static_google_cloud_networkservices_v1_CreateGatewayRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkservices.v1.CreateGatewayRequest.class,
            com.google.cloud.networkservices.v1.CreateGatewayRequest.Builder.class);
  }

  // Presence bits for message-typed fields; bit 0x00000001 tracks `gateway`.
  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;

  // Stored as Object so the lazily-decoded ByteString form can be cached in place;
  // volatile makes the String/ByteString swap safe across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The parent resource of the Gateway. Must be in the
   * format `projects/&#42;/locations/&#42;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The parent resource of the Gateway. Must be in the
   * format `projects/&#42;/locations/&#42;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int GATEWAY_ID_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object gatewayId_ = "";

  /**
   *
   *
   * <pre>
   * Required. Short name of the Gateway resource to be created.
   * </pre>
   *
   * <code>string gateway_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The gatewayId.
   */
  @java.lang.Override
  public java.lang.String getGatewayId() {
    java.lang.Object ref = gatewayId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      gatewayId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Short name of the Gateway resource to be created.
   * </pre>
   *
   * <code>string gateway_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for gatewayId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getGatewayIdBytes() {
    java.lang.Object ref = gatewayId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      gatewayId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int GATEWAY_FIELD_NUMBER = 3;

  private com.google.cloud.networkservices.v1.Gateway gateway_;

  /**
   *
   *
   * <pre>
   * Required. Gateway resource to be created.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the gateway field is set.
   */
  @java.lang.Override
  public boolean hasGateway() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Gateway resource to be created.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The gateway.
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.Gateway getGateway() {
    // Never returns null: falls back to the Gateway default instance when unset.
    return gateway_ == null
        ? com.google.cloud.networkservices.v1.Gateway.getDefaultInstance()
        : gateway_;
  }

  /**
   *
   *
   * <pre>
   * Required. Gateway resource to be created.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.GatewayOrBuilder getGatewayOrBuilder() {
    return gateway_ == null
        ? com.google.cloud.networkservices.v1.Gateway.getDefaultInstance()
        : gateway_;
  }

  // Memoized initialization check: -1 = not computed, 1 = initialized, 0 = not.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: empty strings and unset messages are not written.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gatewayId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, gatewayId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getGateway());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gatewayId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, gatewayId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getGateway());
    }
    size += getUnknownFields().getSerializedSize();
    // Cache the computed size for subsequent calls.
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.networkservices.v1.CreateGatewayRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.networkservices.v1.CreateGatewayRequest other =
        (com.google.cloud.networkservices.v1.CreateGatewayRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!getGatewayId().equals(other.getGatewayId())) return false;
    if (hasGateway() != other.hasGateway()) return false;
    if (hasGateway()) {
      if (!getGateway().equals(other.getGateway())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + GATEWAY_ID_FIELD_NUMBER;
    hash = (53 * hash) + getGatewayId().hashCode();
    if (hasGateway()) {
      hash = (37 * hash) + GATEWAY_FIELD_NUMBER;
      hash = (53 * hash) + getGateway().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.networkservices.v1.CreateGatewayRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request used by the CreateGateway method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.networkservices.v1.CreateGatewayRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.CreateGatewayRequest)
      com.google.cloud.networkservices.v1.CreateGatewayRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkservices.v1.GatewayProto
          .internal_static_google_cloud_networkservices_v1_CreateGatewayRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkservices.v1.GatewayProto
          .internal_static_google_cloud_networkservices_v1_CreateGatewayRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkservices.v1.CreateGatewayRequest.class,
              com.google.cloud.networkservices.v1.CreateGatewayRequest.Builder.class);
    }

    // Construct using com.google.cloud.networkservices.v1.CreateGatewayRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create the nested-message field builder when the runtime
      // requires field builders (e.g. for change notification to parents).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getGatewayFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      gatewayId_ = "";
      gateway_ = null;
      if (gatewayBuilder_ != null) {
        gatewayBuilder_.dispose();
        gatewayBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkservices.v1.GatewayProto
          .internal_static_google_cloud_networkservices_v1_CreateGatewayRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.CreateGatewayRequest getDefaultInstanceForType() {
      return com.google.cloud.networkservices.v1.CreateGatewayRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.CreateGatewayRequest build() {
      com.google.cloud.networkservices.v1.CreateGatewayRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.CreateGatewayRequest buildPartial() {
      com.google.cloud.networkservices.v1.CreateGatewayRequest result =
          new com.google.cloud.networkservices.v1.CreateGatewayRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose builder presence bits are set.
    // Builder bits: 0x1 = parent, 0x2 = gateway_id, 0x4 = gateway;
    // the message itself only tracks `gateway` (message bit 0x1).
    private void buildPartial0(com.google.cloud.networkservices.v1.CreateGatewayRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.gatewayId_ = gatewayId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.gateway_ = gatewayBuilder_ == null ? gateway_ : gatewayBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.networkservices.v1.CreateGatewayRequest) {
        return mergeFrom((com.google.cloud.networkservices.v1.CreateGatewayRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.networkservices.v1.CreateGatewayRequest other) {
      if (other == com.google.cloud.networkservices.v1.CreateGatewayRequest.getDefaultInstance())
        return this;
      // Standard proto merge semantics: non-empty scalars overwrite, set
      // message fields are recursively merged.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getGatewayId().isEmpty()) {
        gatewayId_ = other.gatewayId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasGateway()) {
        mergeGateway(other.getGateway());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: 10 = field 1 (parent, LEN), 18 = field 2 (gateway_id,
          // LEN), 26 = field 3 (gateway, LEN); 0 means end of stream.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                gatewayId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getGatewayFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The parent resource of the Gateway. Must be in the
     * format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource of the Gateway. Must be in the
     * format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource of the Gateway. Must be in the
     * format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource of the Gateway. Must be in the
     * format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource of the Gateway. Must be in the
     * format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object gatewayId_ = "";

    /**
     *
     *
     * <pre>
     * Required. Short name of the Gateway resource to be created.
     * </pre>
     *
     * <code>string gateway_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The gatewayId.
     */
    public java.lang.String getGatewayId() {
      java.lang.Object ref = gatewayId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        gatewayId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Short name of the Gateway resource to be created.
     * </pre>
     *
     * <code>string gateway_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for gatewayId.
     */
    public com.google.protobuf.ByteString getGatewayIdBytes() {
      java.lang.Object ref = gatewayId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        gatewayId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Short name of the Gateway resource to be created.
     * </pre>
     *
     * <code>string gateway_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The gatewayId to set.
     * @return This builder for chaining.
     */
    public Builder setGatewayId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      gatewayId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Short name of the Gateway resource to be created.
     * </pre>
     *
     * <code>string gateway_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearGatewayId() {
      gatewayId_ = getDefaultInstance().getGatewayId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Short name of the Gateway resource to be created.
     * </pre>
     *
     * <code>string gateway_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for gatewayId to set.
     * @return This builder for chaining.
     */
    public Builder setGatewayIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      gatewayId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private com.google.cloud.networkservices.v1.Gateway gateway_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.Gateway,
            com.google.cloud.networkservices.v1.Gateway.Builder,
            com.google.cloud.networkservices.v1.GatewayOrBuilder>
        gatewayBuilder_;

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the gateway field is set.
     */
    public boolean hasGateway() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The gateway.
     */
    public com.google.cloud.networkservices.v1.Gateway getGateway() {
      if (gatewayBuilder_ == null) {
        return gateway_ == null
            ? com.google.cloud.networkservices.v1.Gateway.getDefaultInstance()
            : gateway_;
      } else {
        return gatewayBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setGateway(com.google.cloud.networkservices.v1.Gateway value) {
      if (gatewayBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        gateway_ = value;
      } else {
        gatewayBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setGateway(com.google.cloud.networkservices.v1.Gateway.Builder builderForValue) {
      if (gatewayBuilder_ == null) {
        gateway_ = builderForValue.build();
      } else {
        gatewayBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeGateway(com.google.cloud.networkservices.v1.Gateway value) {
      if (gatewayBuilder_ == null) {
        // Merge into the existing value only when one was already set and is
        // not the shared default instance; otherwise replace outright.
        if (((bitField0_ & 0x00000004) != 0)
            && gateway_ != null
            && gateway_ != com.google.cloud.networkservices.v1.Gateway.getDefaultInstance()) {
          getGatewayBuilder().mergeFrom(value);
        } else {
          gateway_ = value;
        }
      } else {
        gatewayBuilder_.mergeFrom(value);
      }
      if (gateway_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearGateway() {
      bitField0_ = (bitField0_ & ~0x00000004);
      gateway_ = null;
      if (gatewayBuilder_ != null) {
        gatewayBuilder_.dispose();
        gatewayBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkservices.v1.Gateway.Builder getGatewayBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getGatewayFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkservices.v1.GatewayOrBuilder getGatewayOrBuilder() {
      if (gatewayBuilder_ != null) {
        return gatewayBuilder_.getMessageOrBuilder();
      } else {
        return gateway_ == null
            ? com.google.cloud.networkservices.v1.Gateway.getDefaultInstance()
            : gateway_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Gateway resource to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.Gateway gateway = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.Gateway,
            com.google.cloud.networkservices.v1.Gateway.Builder,
            com.google.cloud.networkservices.v1.GatewayOrBuilder>
        getGatewayFieldBuilder() {
      // Lazily create the nested builder; once created, gateway_ is owned by
      // the field builder and the plain reference is cleared.
      if (gatewayBuilder_ == null) {
        gatewayBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.networkservices.v1.Gateway,
                com.google.cloud.networkservices.v1.Gateway.Builder,
                com.google.cloud.networkservices.v1.GatewayOrBuilder>(
                getGateway(), getParentForChildren(), isClean());
        gateway_ = null;
      }
      return gatewayBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.CreateGatewayRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.CreateGatewayRequest)
  private static final com.google.cloud.networkservices.v1.CreateGatewayRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.CreateGatewayRequest();
  }

  public static com.google.cloud.networkservices.v1.CreateGatewayRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; attaches the partially-built message to any thrown
  // InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CreateGatewayRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateGatewayRequest>() {
        @java.lang.Override
        public CreateGatewayRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateGatewayRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateGatewayRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.CreateGatewayRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1/asset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.asset.v1;
/**
*
*
* <pre>
* Response of listing saved queries.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.ListSavedQueriesResponse}
*/
public final class ListSavedQueriesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.asset.v1.ListSavedQueriesResponse)
ListSavedQueriesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSavedQueriesResponse.newBuilder() to construct.
private ListSavedQueriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListSavedQueriesResponse() {
savedQueries_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListSavedQueriesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_ListSavedQueriesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_ListSavedQueriesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.ListSavedQueriesResponse.class,
com.google.cloud.asset.v1.ListSavedQueriesResponse.Builder.class);
}
public static final int SAVED_QUERIES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.asset.v1.SavedQuery> savedQueries_;
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.asset.v1.SavedQuery> getSavedQueriesList() {
return savedQueries_;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.asset.v1.SavedQueryOrBuilder>
getSavedQueriesOrBuilderList() {
return savedQueries_;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
@java.lang.Override
public int getSavedQueriesCount() {
return savedQueries_.size();
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.asset.v1.SavedQuery getSavedQueries(int index) {
return savedQueries_.get(index);
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.asset.v1.SavedQueryOrBuilder getSavedQueriesOrBuilder(int index) {
return savedQueries_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // Serializes this message to the wire: each saved_queries element as
  // field 1, next_page_token as field 2 only when non-empty (proto3 default
  // elision), then any unknown fields captured at parse time.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < savedQueries_.size(); i++) {
      output.writeMessage(1, savedQueries_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size of this message, mirroring writeTo's
  // field layout. The result is memoized in memoizedSize (-1 = not yet
  // computed), which is safe because the message is immutable.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < savedQueries_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, savedQueries_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: two responses are equal iff their saved_queries lists,
  // next_page_token, and unknown fields all match. Non-ListSavedQueriesResponse
  // arguments are deferred to the superclass implementation.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.asset.v1.ListSavedQueriesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.asset.v1.ListSavedQueriesResponse other =
        (com.google.cloud.asset.v1.ListSavedQueriesResponse) obj;
    if (!getSavedQueriesList().equals(other.getSavedQueriesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(): mixes the descriptor, each populated
  // field (tagged by its field number), and the unknown fields. Memoized in
  // memoizedHashCode (0 = not yet computed), safe because the message is
  // immutable. The repeated field is only mixed in when non-empty, matching
  // proto3 presence semantics used by equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSavedQueriesCount() > 0) {
      hash = (37 * hash) + SAVED_QUERIES_FIELD_NUMBER;
      hash = (53 * hash) + getSavedQueriesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.asset.v1.ListSavedQueriesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response of listing saved queries.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.ListSavedQueriesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.asset.v1.ListSavedQueriesResponse)
com.google.cloud.asset.v1.ListSavedQueriesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_ListSavedQueriesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_ListSavedQueriesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.ListSavedQueriesResponse.class,
com.google.cloud.asset.v1.ListSavedQueriesResponse.Builder.class);
}
// Construct using com.google.cloud.asset.v1.ListSavedQueriesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (savedQueriesBuilder_ == null) {
savedQueries_ = java.util.Collections.emptyList();
} else {
savedQueries_ = null;
savedQueriesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_ListSavedQueriesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.asset.v1.ListSavedQueriesResponse getDefaultInstanceForType() {
return com.google.cloud.asset.v1.ListSavedQueriesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.asset.v1.ListSavedQueriesResponse build() {
com.google.cloud.asset.v1.ListSavedQueriesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.asset.v1.ListSavedQueriesResponse buildPartial() {
com.google.cloud.asset.v1.ListSavedQueriesResponse result =
new com.google.cloud.asset.v1.ListSavedQueriesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.asset.v1.ListSavedQueriesResponse result) {
if (savedQueriesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
savedQueries_ = java.util.Collections.unmodifiableList(savedQueries_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.savedQueries_ = savedQueries_;
} else {
result.savedQueries_ = savedQueriesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.asset.v1.ListSavedQueriesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.asset.v1.ListSavedQueriesResponse) {
return mergeFrom((com.google.cloud.asset.v1.ListSavedQueriesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.asset.v1.ListSavedQueriesResponse other) {
if (other == com.google.cloud.asset.v1.ListSavedQueriesResponse.getDefaultInstance())
return this;
if (savedQueriesBuilder_ == null) {
if (!other.savedQueries_.isEmpty()) {
if (savedQueries_.isEmpty()) {
savedQueries_ = other.savedQueries_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSavedQueriesIsMutable();
savedQueries_.addAll(other.savedQueries_);
}
onChanged();
}
} else {
if (!other.savedQueries_.isEmpty()) {
if (savedQueriesBuilder_.isEmpty()) {
savedQueriesBuilder_.dispose();
savedQueriesBuilder_ = null;
savedQueries_ = other.savedQueries_;
bitField0_ = (bitField0_ & ~0x00000001);
savedQueriesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSavedQueriesFieldBuilder()
: null;
} else {
savedQueriesBuilder_.addAllMessages(other.savedQueries_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Merges a wire-format stream into this builder. Each tag encodes
    // (field_number << 3) | wire_type: tag 10 = field 1 (saved_queries,
    // length-delimited), tag 18 = field 2 (next_page_token, length-delimited),
    // tag 0 = end of stream. Unrecognized tags are routed to
    // parseUnknownField, which also signals end-of-group by returning false.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.asset.v1.SavedQuery m =
                    input.readMessage(
                        com.google.cloud.asset.v1.SavedQuery.parser(), extensionRegistry);
                // Append to the plain list or the field builder, whichever is active.
                if (savedQueriesBuilder_ == null) {
                  ensureSavedQueriesIsMutable();
                  savedQueries_.add(m);
                } else {
                  savedQueriesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure, since fields may have been set.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.asset.v1.SavedQuery> savedQueries_ =
java.util.Collections.emptyList();
private void ensureSavedQueriesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
savedQueries_ =
new java.util.ArrayList<com.google.cloud.asset.v1.SavedQuery>(savedQueries_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.asset.v1.SavedQuery,
com.google.cloud.asset.v1.SavedQuery.Builder,
com.google.cloud.asset.v1.SavedQueryOrBuilder>
savedQueriesBuilder_;
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public java.util.List<com.google.cloud.asset.v1.SavedQuery> getSavedQueriesList() {
if (savedQueriesBuilder_ == null) {
return java.util.Collections.unmodifiableList(savedQueries_);
} else {
return savedQueriesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public int getSavedQueriesCount() {
if (savedQueriesBuilder_ == null) {
return savedQueries_.size();
} else {
return savedQueriesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public com.google.cloud.asset.v1.SavedQuery getSavedQueries(int index) {
if (savedQueriesBuilder_ == null) {
return savedQueries_.get(index);
} else {
return savedQueriesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder setSavedQueries(int index, com.google.cloud.asset.v1.SavedQuery value) {
if (savedQueriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSavedQueriesIsMutable();
savedQueries_.set(index, value);
onChanged();
} else {
savedQueriesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder setSavedQueries(
int index, com.google.cloud.asset.v1.SavedQuery.Builder builderForValue) {
if (savedQueriesBuilder_ == null) {
ensureSavedQueriesIsMutable();
savedQueries_.set(index, builderForValue.build());
onChanged();
} else {
savedQueriesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder addSavedQueries(com.google.cloud.asset.v1.SavedQuery value) {
if (savedQueriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSavedQueriesIsMutable();
savedQueries_.add(value);
onChanged();
} else {
savedQueriesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder addSavedQueries(int index, com.google.cloud.asset.v1.SavedQuery value) {
if (savedQueriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSavedQueriesIsMutable();
savedQueries_.add(index, value);
onChanged();
} else {
savedQueriesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder addSavedQueries(com.google.cloud.asset.v1.SavedQuery.Builder builderForValue) {
if (savedQueriesBuilder_ == null) {
ensureSavedQueriesIsMutable();
savedQueries_.add(builderForValue.build());
onChanged();
} else {
savedQueriesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder addSavedQueries(
int index, com.google.cloud.asset.v1.SavedQuery.Builder builderForValue) {
if (savedQueriesBuilder_ == null) {
ensureSavedQueriesIsMutable();
savedQueries_.add(index, builderForValue.build());
onChanged();
} else {
savedQueriesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder addAllSavedQueries(
java.lang.Iterable<? extends com.google.cloud.asset.v1.SavedQuery> values) {
if (savedQueriesBuilder_ == null) {
ensureSavedQueriesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, savedQueries_);
onChanged();
} else {
savedQueriesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder clearSavedQueries() {
if (savedQueriesBuilder_ == null) {
savedQueries_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
savedQueriesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public Builder removeSavedQueries(int index) {
if (savedQueriesBuilder_ == null) {
ensureSavedQueriesIsMutable();
savedQueries_.remove(index);
onChanged();
} else {
savedQueriesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public com.google.cloud.asset.v1.SavedQuery.Builder getSavedQueriesBuilder(int index) {
return getSavedQueriesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public com.google.cloud.asset.v1.SavedQueryOrBuilder getSavedQueriesOrBuilder(int index) {
if (savedQueriesBuilder_ == null) {
return savedQueries_.get(index);
} else {
return savedQueriesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public java.util.List<? extends com.google.cloud.asset.v1.SavedQueryOrBuilder>
getSavedQueriesOrBuilderList() {
if (savedQueriesBuilder_ != null) {
return savedQueriesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(savedQueries_);
}
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public com.google.cloud.asset.v1.SavedQuery.Builder addSavedQueriesBuilder() {
return getSavedQueriesFieldBuilder()
.addBuilder(com.google.cloud.asset.v1.SavedQuery.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public com.google.cloud.asset.v1.SavedQuery.Builder addSavedQueriesBuilder(int index) {
return getSavedQueriesFieldBuilder()
.addBuilder(index, com.google.cloud.asset.v1.SavedQuery.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of savedQueries.
* </pre>
*
* <code>repeated .google.cloud.asset.v1.SavedQuery saved_queries = 1;</code>
*/
public java.util.List<com.google.cloud.asset.v1.SavedQuery.Builder>
getSavedQueriesBuilderList() {
return getSavedQueriesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.asset.v1.SavedQuery,
com.google.cloud.asset.v1.SavedQuery.Builder,
com.google.cloud.asset.v1.SavedQueryOrBuilder>
getSavedQueriesFieldBuilder() {
if (savedQueriesBuilder_ == null) {
savedQueriesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.asset.v1.SavedQuery,
com.google.cloud.asset.v1.SavedQuery.Builder,
com.google.cloud.asset.v1.SavedQueryOrBuilder>(
savedQueries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
savedQueries_ = null;
}
return savedQueriesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.asset.v1.ListSavedQueriesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.asset.v1.ListSavedQueriesResponse)
private static final com.google.cloud.asset.v1.ListSavedQueriesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.asset.v1.ListSavedQueriesResponse();
}
public static com.google.cloud.asset.v1.ListSavedQueriesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListSavedQueriesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListSavedQueriesResponse>() {
@java.lang.Override
public ListSavedQueriesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListSavedQueriesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListSavedQueriesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.asset.v1.ListSavedQueriesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gora.cassandra.serializers;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Update;
import com.datastax.driver.mapping.annotations.UDT;
import org.apache.avro.Schema;
import org.apache.gora.cassandra.bean.CassandraKey;
import org.apache.gora.cassandra.bean.ClusterKeyField;
import org.apache.gora.cassandra.bean.Field;
import org.apache.gora.cassandra.bean.KeySpace;
import org.apache.gora.cassandra.bean.PartitionKeyField;
import org.apache.gora.cassandra.query.CassandraQuery;
import org.apache.gora.cassandra.store.CassandraMapping;
import org.apache.gora.cassandra.store.CassandraStore;
import org.apache.gora.query.Query;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
 * This class is used to create Cassandra CQL queries (key space / table DDL,
 * CRUD statements and user-defined types) from Gora mapping metadata and
 * Gora queries.
 */
class CassandraQueryFactory {

  private static final Logger LOG = LoggerFactory.getLogger(CassandraQueryFactory.class);

  /**
   * This method returns the CQL query to create key space.
   * refer : http://docs.datastax.com/en/cql/3.1/cql/cql_reference/create_keyspace_r.html
   *
   * @param mapping Cassandra Mapping {@link CassandraMapping}
   * @return CQL Query
   */
  static String getCreateKeySpaceQuery(CassandraMapping mapping) {
    KeySpace keySpace = mapping.getKeySpace();
    KeySpace.PlacementStrategy placementStrategy = keySpace.getPlacementStrategy();
    StringBuilder query = new StringBuilder();
    query.append("CREATE KEYSPACE IF NOT EXISTS ").append(keySpace.getName())
            .append(" WITH REPLICATION = { 'class' : '").append(placementStrategy).append("', ");
    switch (placementStrategy) {
      case SimpleStrategy:
        query.append("'replication_factor' : ").append(keySpace.getReplicationFactor()).append(" }");
        break;
      case NetworkTopologyStrategy:
        boolean isCommaNeeded = false;
        for (Map.Entry<String, Integer> dataCenter : keySpace.getDataCenters().entrySet()) {
          if (isCommaNeeded) {
            query.append(", ");
          }
          query.append("'").append(dataCenter.getKey()).append("' : ").append(dataCenter.getValue());
          isCommaNeeded = true;
        }
        query.append(" }");
        break;
    }
    // Always emit DURABLE_WRITES so that a disabled value ("false") actually
    // reaches Cassandra; previously the clause was only appended when the flag
    // was true, which is the server default anyway.
    query.append(" AND DURABLE_WRITES = ").append(keySpace.isDurableWritesEnabled());
    return query.toString();
  }

  /**
   * This method returns the CQL query to create the table.
   * refer : http://docs.datastax.com/en/cql/3.1/cql/cql_reference/create_table_r.html
   * <p>
   * Trick : To have a consistency of the order of the columns, first we append partition keys,
   * second cluster keys and finally other columns. It's very much needed to follow the same
   * order in other CRUD operations as well.
   *
   * @param mapping Cassandra mapping {@link CassandraMapping}
   * @return CQL Query
   */
  static String getCreateTableQuery(CassandraMapping mapping) {
    StringBuilder query = new StringBuilder();
    query.append("CREATE TABLE IF NOT EXISTS ").append(mapping.getKeySpace().getName()).append(".").append(mapping.getCoreName()).append(" (");
    CassandraKey cassandraKey = mapping.getCassandraKey();
    // Appending Cassandra persistent columns into the db schema. The returned
    // comma state is threaded into the key-field pass so that an empty field
    // list does not produce a stray leading comma.
    boolean hasColumns = processFieldsForCreateTableQuery(mapping.getFieldList(), false, query);
    if (cassandraKey != null) {
      processFieldsForCreateTableQuery(cassandraKey.getFieldList(), hasColumns, query);
      List<PartitionKeyField> partitionKeys = cassandraKey.getPartitionKeyFields();
      if (partitionKeys != null) {
        appendPrimaryKeyClause(partitionKeys, query);
      }
    }
    query.append(")");
    boolean isWithNeeded = true;
    if (Boolean.parseBoolean(mapping.getProperty("compactStorage"))) {
      query.append(" WITH COMPACT STORAGE ");
      isWithNeeded = false;
    }
    String id = mapping.getProperty("id");
    if (id != null) {
      query.append(isWithNeeded ? " WITH " : " AND ");
      query.append("ID = '").append(id).append("'");
      isWithNeeded = false;
    }
    if (cassandraKey != null) {
      List<ClusterKeyField> clusterKeyFields = cassandraKey.getClusterKeyFields();
      if (clusterKeyFields != null) {
        query.append(isWithNeeded ? " WITH " : " AND ");
        appendClusteringOrderClause(clusterKeyFields, query);
      }
    }
    return query.toString();
  }

  /**
   * Appends ", PRIMARY KEY ( ... )" covering both simple and composite
   * partition keys; composite sub-keys are wrapped in parentheses.
   */
  private static void appendPrimaryKeyClause(List<PartitionKeyField> partitionKeys, StringBuilder builder) {
    builder.append(", PRIMARY KEY (");
    boolean isCommaNeeded = false;
    for (PartitionKeyField keyField : partitionKeys) {
      if (isCommaNeeded) {
        builder.append(",");
      }
      if (keyField.isComposite()) {
        builder.append("(");
        boolean isInnerCommaNeeded = false;
        for (Field field : keyField.getFields()) {
          if (isInnerCommaNeeded) {
            builder.append(", ");
          }
          builder.append(field.getColumnName());
          isInnerCommaNeeded = true;
        }
        builder.append(")");
      } else {
        builder.append(keyField.getColumnName());
      }
      isCommaNeeded = true;
    }
    builder.append(")");
  }

  /**
   * Appends " CLUSTERING ORDER BY ( col [ASC|DESC], ... )" for the given
   * cluster key fields; a null order leaves the server default.
   */
  private static void appendClusteringOrderClause(List<ClusterKeyField> clusterKeyFields, StringBuilder builder) {
    builder.append(" CLUSTERING ORDER BY (");
    boolean isCommaNeeded = false;
    for (ClusterKeyField keyField : clusterKeyFields) {
      if (isCommaNeeded) {
        builder.append(", ");
      }
      builder.append(keyField.getColumnName()).append(" ");
      if (keyField.getOrder() != null) {
        builder.append(keyField.getOrder());
      }
      isCommaNeeded = true;
    }
    builder.append(")");
  }

  /**
   * Appends "column type [STATIC] [PRIMARY KEY]" fragments for each field.
   *
   * @param fields        fields to render
   * @param isCommaNeeded whether a separator is required before the first field
   * @param builder       target builder
   * @return the comma state after processing (true if anything was appended)
   */
  private static boolean processFieldsForCreateTableQuery(List<Field> fields, boolean isCommaNeeded, StringBuilder builder) {
    for (Field field : fields) {
      if (isCommaNeeded) {
        builder.append(", ");
      }
      builder.append(field.getColumnName()).append(" ").append(field.getType());
      if (Boolean.parseBoolean(field.getProperty("static"))) {
        builder.append(" STATIC");
      }
      if (Boolean.parseBoolean(field.getProperty("primarykey"))) {
        builder.append(" PRIMARY KEY ");
      }
      isCommaNeeded = true;
    }
    return isCommaNeeded;
  }

  /**
   * This method returns the CQL query to drop table.
   * refer : http://docs.datastax.com/en/cql/3.1/cql/cql_reference/drop_table_r.html
   *
   * @param mapping Cassandra Mapping {@link CassandraMapping}
   * @return CQL query
   */
  static String getDropTableQuery(CassandraMapping mapping) {
    return "DROP TABLE IF EXISTS " + mapping.getKeySpace().getName() + "." + mapping.getCoreName();
  }

  /**
   * This method returns the CQL query to drop key space.
   * refer : http://docs.datastax.com/en/cql/3.1/cql/cql_reference/drop_keyspace_r.html
   *
   * @param mapping Cassandra Mapping {@link CassandraMapping}
   * @return CQL query
   */
  static String getDropKeySpaceQuery(CassandraMapping mapping) {
    return "DROP KEYSPACE IF EXISTS " + mapping.getKeySpace().getName();
  }

  /**
   * This method returns the CQL query to truncate (removes all the data) in the table.
   * refer : http://docs.datastax.com/en/cql/3.1/cql/cql_reference/truncate_r.html
   *
   * @param mapping Cassandra Mapping {@link CassandraMapping}
   * @return CQL query
   */
  static String getTruncateTableQuery(CassandraMapping mapping) {
    return QueryBuilder.truncate(mapping.getKeySpace().getName(), mapping.getCoreName()).getQueryString();
  }

  /**
   * This method returns the CQL query to insert data in to the table.
   * refer : http://docs.datastax.com/en/cql/3.1/cql/cql_reference/insert_r.html
   *
   * @param mapping Cassandra Mapping {@link CassandraMapping}
   * @param fields  available fields
   * @return CQL Query
   */
  static String getInsertDataQuery(CassandraMapping mapping, List<String> fields) {
    String[] columnNames = getColumnNames(mapping, fields);
    // Size the bind markers by the resolved columns, not the requested fields:
    // getColumnNames() silently drops unmapped fields, and a length mismatch
    // would make the driver reject the statement.
    String[] placeholders = new String[columnNames.length];
    Arrays.fill(placeholders, "?");
    return QueryBuilder.insertInto(mapping.getKeySpace().getName(), mapping.getCoreName()).values(columnNames, placeholders).getQueryString();
  }

  /**
   * This method returns the CQL query to delete a persistent in the table.
   * refer : http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlDelete.html
   *
   * @param mapping Cassandra Mapping {@link CassandraMapping}
   * @param fields  field list to be deleted
   * @return CQL Query
   */
  static String getDeleteDataQuery(CassandraMapping mapping, List<String> fields) {
    String[] columnNames = getColumnNames(mapping, fields);
    Delete delete = QueryBuilder.delete().from(mapping.getKeySpace().getName(), mapping.getCoreName());
    return processKeys(columnNames, delete);
  }

  /**
   * Adds "column = ?" WHERE clauses for every given column to the statement.
   * Returns null when there are no columns (no WHERE clause can be built).
   */
  private static String processKeys(String[] columnNames, BuiltStatement statement) {
    BuiltStatement query = null;
    boolean isWhereNeeded = true;
    for (String columnName : columnNames) {
      Clause clause = QueryBuilder.eq(columnName, "?");
      if (isWhereNeeded) {
        query = startWhere(statement, clause);
        isWhereNeeded = false;
      } else {
        query = addWhere(statement, query, clause);
      }
    }
    return query != null ? query.getQueryString() : null;
  }

  /**
   * Applies the first WHERE clause to a statement. The driver exposes
   * {@code where()} through type-specific APIs, hence the instanceof dispatch
   * over SELECT, DELETE and UPDATE statements.
   */
  private static BuiltStatement startWhere(BuiltStatement statement, Clause clause) {
    if (statement instanceof Select) {
      return ((Select) statement).where(clause);
    } else if (statement instanceof Delete) {
      return ((Delete) statement).where(clause);
    } else {
      return ((Update.Assignments) statement).where(clause);
    }
  }

  /**
   * Chains an additional AND clause onto an already started WHERE chain.
   *
   * @param statement  the root statement, used only to pick the correct cast
   * @param whereChain the chain produced by {@link #startWhere} / previous calls
   */
  private static BuiltStatement addWhere(BuiltStatement statement, BuiltStatement whereChain, Clause clause) {
    if (statement instanceof Select) {
      return ((Select.Where) whereChain).and(clause);
    } else if (statement instanceof Delete) {
      return ((Delete.Where) whereChain).and(clause);
    } else {
      return ((Update.Where) whereChain).and(clause);
    }
  }

  /**
   * Key-range (start/end key) constraints cannot be expressed for DELETE
   * statements in the targeted Cassandra versions.
   */
  private static void validateRangeSupport(BuiltStatement statement) {
    if (statement instanceof Delete) {
      /*
      According to the JIRA https://issues.apache.org/jira/browse/CASSANDRA-7651 this has been fixed, but It seems this not fixed yet.
       */
      throw new RuntimeException("Delete by Query is not supported for Key Ranges.");
    }
  }

  /**
   * This method returns the CQL Select query to retrieve data from the table.
   * refer: http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlSelect.html
   *
   * @param mapping   Cassandra Mapping {@link CassandraMapping}
   * @param keyFields key fields
   * @return CQL Query
   */
  static String getSelectObjectQuery(CassandraMapping mapping, List<String> keyFields) {
    Select select = QueryBuilder.select().from(mapping.getKeySpace().getName(), mapping.getCoreName());
    if (Boolean.parseBoolean(mapping.getProperty("allowFiltering"))) {
      select.allowFiltering();
    }
    return processKeys(getColumnNames(mapping, keyFields), select);
  }

  /**
   * This method returns CQL Select query to retrieve data from the table with given fields.
   * This method is used for Avro Serialization.
   * refer: http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlSelect.html
   *
   * @param mapping   Cassandra Mapping {@link CassandraMapping}
   * @param fields    given fields to retrieve
   * @param keyFields key fields
   * @return CQL Query
   */
  static String getSelectObjectWithFieldsQuery(CassandraMapping mapping, String[] fields, List<String> keyFields) {
    Select select = QueryBuilder.select(getColumnNames(mapping, Arrays.asList(fields))).from(mapping.getKeySpace().getName(), mapping.getCoreName());
    if (Boolean.parseBoolean(mapping.getProperty("allowFiltering"))) {
      select.allowFiltering();
    }
    return processKeys(getColumnNames(mapping, keyFields), select);
  }

  /**
   * This method returns CQL Select query to check if a key exists.
   * This method is used for Avro Serialization.
   * refer: http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlSelect.html
   *
   * @param mapping   Cassandra Mapping {@link CassandraMapping}
   * @param keyFields key fields
   * @return CQL Query
   */
  static String getCheckExistsQuery(CassandraMapping mapping, List<String> keyFields) {
    Select select = QueryBuilder.select().countAll().from(mapping.getKeySpace().getName(), mapping.getCoreName());
    if (Boolean.parseBoolean(mapping.getProperty("allowFiltering"))) {
      select.allowFiltering();
    }
    return processKeys(getColumnNames(mapping, keyFields), select);
  }

  /**
   * This method returns CQL Select query to retrieve data from the table with given fields.
   * This method is used for Native Serialization.
   * refer: http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlSelect.html
   *
   * @param mapping Cassandra Mapping {@link CassandraMapping}
   * @param fields  given fields to retrieve
   * @return CQL Query, or null when no key column could be determined
   */
  static String getSelectObjectWithFieldsQuery(CassandraMapping mapping, String[] fields) {
    String[] columnNames = getColumnNames(mapping, Arrays.asList(fields));
    Select select = QueryBuilder.select(columnNames).from(mapping.getKeySpace().getName(), mapping.getCoreName());
    if (Boolean.parseBoolean(mapping.getProperty("allowFiltering"))) {
      select.allowFiltering();
    }
    CassandraKey cKey = mapping.getCassandraKey();
    if (cKey != null) {
      // Match on every column of the (composite) Cassandra key.
      Select.Where query = null;
      boolean isWhereNeeded = true;
      for (Field field : cKey.getFieldList()) {
        if (isWhereNeeded) {
          query = select.where(QueryBuilder.eq(field.getColumnName(), "?"));
          isWhereNeeded = false;
        } else {
          query = query.and(QueryBuilder.eq(field.getColumnName(), "?"));
        }
      }
      return query != null ? query.getQueryString() : null;
    }
    // Single primary key declared on a field property.
    String primaryKey = getPKey(mapping.getFieldList());
    return primaryKey != null
            ? select.where(QueryBuilder.eq(primaryKey, "?")).getQueryString()
            : null;
  }

  /**
   * This method returns CQL Query for execute method. This CQL contains a
   * Select Query to retrieve data from the table.
   *
   * @param mapping        Cassandra Mapping {@link CassandraMapping}
   * @param cassandraQuery Query {@link CassandraQuery}
   * @param objects        object list, populated with the bind values in clause order
   * @param fields         fields to retrieve
   * @return CQL Query
   */
  static String getExecuteQuery(CassandraMapping mapping, Query cassandraQuery, List<Object> objects, String[] fields) {
    Select select = QueryBuilder.select(getColumnNames(mapping, Arrays.asList(fields))).from(mapping.getKeySpace().getName(), mapping.getCoreName());
    long limit = cassandraQuery.getLimit();
    if (limit > 0) {
      select = select.limit((int) limit);
    }
    if (Boolean.parseBoolean(mapping.getProperty("allowFiltering"))) {
      select.allowFiltering();
    }
    return processQuery(cassandraQuery, select, mapping, objects);
  }

  /**
   * Translates the key constraints of a Gora query (exact key, start key,
   * end key) into WHERE clauses on the given statement, adding the matching
   * bind values to {@code objects} in clause order.
   *
   * @return the CQL string; the unrestricted statement when no key constraint
   * is present; null when a constraint was requested but no clause could be built
   */
  private static String processQuery(Query cassandraQuery, BuiltStatement statement, CassandraMapping mapping, List<Object> objects) {
    Object key = cassandraQuery.getKey();
    Object startKey = cassandraQuery.getStartKey();
    Object endKey = cassandraQuery.getEndKey();
    if (key == null && startKey == null && endKey == null) {
      // Unrestricted query: no WHERE clause at all.
      return statement.getQueryString();
    }
    BuiltStatement query = null;
    boolean isWhereNeeded = true;
    String primaryKey = null;
    if (key != null) {
      if (mapping.getCassandraKey() != null) {
        // Composite key: equality clause per sub-key column.
        ArrayList<String> cassandraKeys = new ArrayList<>();
        ArrayList<Object> cassandraValues = new ArrayList<>();
        AvroCassandraUtils.processKeys(mapping, key, cassandraKeys, cassandraValues);
        String[] columnKeys = getColumnNames(mapping, cassandraKeys);
        for (int i = 0; i < cassandraKeys.size(); i++) {
          Clause clause = QueryBuilder.eq(columnKeys[i], "?");
          if (isWhereNeeded) {
            query = startWhere(statement, clause);
            isWhereNeeded = false;
          } else {
            query = addWhere(statement, query, clause);
          }
          objects.add(cassandraValues.get(i));
        }
      } else {
        primaryKey = getPKey(mapping.getFieldList());
        query = startWhere(statement, QueryBuilder.eq(primaryKey, "?"));
        objects.add(key);
      }
    } else {
      if (startKey != null) {
        if (mapping.getCassandraKey() != null) {
          ArrayList<String> cassandraKeys = new ArrayList<>();
          ArrayList<Object> cassandraValues = new ArrayList<>();
          AvroCassandraUtils.processKeys(mapping, startKey, cassandraKeys, cassandraValues);
          String[] columnKeys = getColumnNames(mapping, cassandraKeys);
          for (int i = 0; i < cassandraKeys.size(); i++) {
            validateRangeSupport(statement);
            Clause clause = QueryBuilder.gte(columnKeys[i], "?");
            if (isWhereNeeded) {
              query = startWhere(statement, clause);
              isWhereNeeded = false;
            } else {
              query = addWhere(statement, query, clause);
            }
            objects.add(cassandraValues.get(i));
          }
        } else {
          primaryKey = getPKey(mapping.getFieldList());
          validateRangeSupport(statement);
          query = startWhere(statement, QueryBuilder.gte(primaryKey, "?"));
          objects.add(startKey);
          isWhereNeeded = false;
        }
      }
      if (endKey != null) {
        if (mapping.getCassandraKey() != null) {
          ArrayList<String> cassandraKeys = new ArrayList<>();
          ArrayList<Object> cassandraValues = new ArrayList<>();
          AvroCassandraUtils.processKeys(mapping, endKey, cassandraKeys, cassandraValues);
          String[] columnKeys = getColumnNames(mapping, cassandraKeys);
          for (int i = 0; i < cassandraKeys.size(); i++) {
            validateRangeSupport(statement);
            Clause clause = QueryBuilder.lte(columnKeys[i], "?");
            if (isWhereNeeded) {
              query = startWhere(statement, clause);
              isWhereNeeded = false;
            } else {
              query = addWhere(statement, query, clause);
            }
            objects.add(cassandraValues.get(i));
          }
        } else {
          // Reuse the primary key resolved for the start key when available.
          primaryKey = primaryKey != null ? primaryKey : getPKey(mapping.getFieldList());
          validateRangeSupport(statement);
          Clause clause = QueryBuilder.lte(primaryKey, "?");
          query = isWhereNeeded ? startWhere(statement, clause) : addWhere(statement, query, clause);
          objects.add(endKey);
        }
      }
    }
    return query != null ? query.getQueryString() : null;
  }

  /**
   * Resolves Gora field names to Cassandra column names using the field
   * mapping first and the (composite) key mapping second. Unknown fields are
   * logged and dropped.
   */
  private static String[] getColumnNames(CassandraMapping mapping, List<String> fields) {
    ArrayList<String> columnNames = new ArrayList<>();
    for (String field : fields) {
      Field fieldBean = mapping.getFieldFromFieldName(field);
      CassandraKey cassandraKey = mapping.getCassandraKey();
      Field keyBean = null;
      if (cassandraKey != null) {
        keyBean = cassandraKey.getFieldFromFieldName(field);
      }
      if (fieldBean != null) {
        columnNames.add(fieldBean.getColumnName());
      } else if (keyBean != null) {
        columnNames.add(keyBean.getColumnName());
      } else {
        LOG.warn("{} field is ignored, couldn't find relevant field in the persistent mapping", field);
      }
    }
    return columnNames.toArray(new String[0]);
  }

  /**
   * Returns the column name of the first field flagged with the
   * "primarykey" property, or null when none is declared.
   */
  private static String getPKey(List<Field> fields) {
    for (Field field : fields) {
      if (Boolean.parseBoolean(field.getProperty("primarykey"))) {
        return field.getColumnName();
      }
    }
    return null;
  }

  /**
   * This method returns CQL Query for DeleteByQuery method.
   * refer: http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlDelete.html
   *
   * @param mapping        Cassandra Mapping {@link CassandraMapping}
   * @param cassandraQuery Cassandra Query {@link CassandraQuery}
   * @param objects        field values
   * @return CQL Query
   */
  static String getDeleteByQuery(CassandraMapping mapping, Query cassandraQuery, List<Object> objects) {
    String[] columns = null;
    if (cassandraQuery.getFields() != null) {
      columns = getColumnNames(mapping, Arrays.asList(cassandraQuery.getFields()));
    }
    Delete delete;
    if (columns != null) {
      delete = QueryBuilder.delete(columns).from(mapping.getKeySpace().getName(), mapping.getCoreName());
    } else {
      delete = QueryBuilder.delete().from(mapping.getKeySpace().getName(), mapping.getCoreName());
    }
    return processQuery(cassandraQuery, delete, mapping, objects);
  }

  /**
   * This method returns the CQL Query for UpdateByQuery method.
   * This variant converts the update values through the Avro schema.
   * refer : http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlUpdate.html
   *
   * @param mapping        Cassandra mapping {@link CassandraMapping}
   * @param cassandraQuery Cassandra Query {@link CassandraQuery}
   * @param objects        field Objects list, populated with the converted values
   * @param schema         Avro schema of the persistent class
   * @return CQL Query
   */
  static String getUpdateByQueryForAvro(CassandraMapping mapping, Query cassandraQuery, List<Object> objects, Schema schema) {
    if (!(cassandraQuery instanceof CassandraQuery)) {
      throw new RuntimeException("Please use Cassandra Query object to invoke, UpdateByQuery method.");
    }
    Update update = QueryBuilder.update(mapping.getKeySpace().getName(), mapping.getCoreName());
    Update.Assignments updateAssignments = null;
    String[] columnNames = getColumnNames(mapping, Arrays.asList(cassandraQuery.getFields()));
    for (String column : columnNames) {
      updateAssignments = update.with(QueryBuilder.set(column, "?"));
      Field field = mapping.getFieldFromColumnName(column);
      Object value = ((CassandraQuery) cassandraQuery).getUpdateFieldValue(field.getFieldName());
      // Explicit null check instead of catching NullPointerException as
      // control flow; the error message is unchanged.
      Schema.Field avroField = schema.getField(field.getFieldName());
      if (avroField == null) {
        throw new RuntimeException(field + " field couldn't find in the class " + mapping.getPersistentClass() + ".");
      }
      Schema fieldSchema = avroField.schema();
      objects.add(AvroCassandraUtils.getFieldValueFromAvroBean(fieldSchema, fieldSchema.getType(), value, field));
    }
    return processQuery(cassandraQuery, updateAssignments, mapping, objects);
  }

  /**
   * This method returns the CQL Query for UpdateByQuery method.
   * This variant passes the update values through unchanged (Native Serialization).
   * refer : http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlUpdate.html
   *
   * @param mapping        Cassandra mapping {@link CassandraMapping}
   * @param cassandraQuery Cassandra Query {@link CassandraQuery}
   * @param objects        field Objects list, populated with the raw update values
   * @return CQL Query
   */
  static String getUpdateByQueryForNative(CassandraMapping mapping, Query cassandraQuery, List<Object> objects) {
    if (!(cassandraQuery instanceof CassandraQuery)) {
      throw new RuntimeException("Please use Cassandra Query object to invoke, UpdateByQuery method.");
    }
    Update update = QueryBuilder.update(mapping.getKeySpace().getName(), mapping.getCoreName());
    Update.Assignments updateAssignments = null;
    String[] columnNames = getColumnNames(mapping, Arrays.asList(cassandraQuery.getFields()));
    for (String column : columnNames) {
      updateAssignments = update.with(QueryBuilder.set(column, "?"));
      objects.add(((CassandraQuery) cassandraQuery).getUpdateFieldValue(mapping.getFieldFromColumnName(column).getFieldName()));
    }
    return processQuery(cassandraQuery, updateAssignments, mapping, objects);
  }

  /**
   * Appends the CQL type name for the given Avro schema, recursing into
   * maps, lists, records (frozen UDTs) and unions.
   *
   * @throws Exception when a record type maps onto itself recursively, which
   *                   Cassandra UDTs cannot express
   */
  private static void populateFieldsToQuery(Schema schema, StringBuilder builder) throws Exception {
    switch (schema.getType()) {
      case INT:
        builder.append("int");
        break;
      case MAP:
        builder.append("map<text,");
        populateFieldsToQuery(schema.getValueType(), builder);
        builder.append(">");
        break;
      case ARRAY:
        builder.append("list<");
        populateFieldsToQuery(schema.getElementType(), builder);
        builder.append(">");
        break;
      case LONG:
        builder.append("bigint");
        break;
      case FLOAT:
        builder.append("float");
        break;
      case DOUBLE:
        builder.append("double");
        break;
      case BOOLEAN:
        builder.append("boolean");
        break;
      case BYTES:
        builder.append("blob");
        break;
      case RECORD:
        builder.append("frozen<").append(schema.getName()).append(">");
        break;
      case STRING:
      case FIXED:
      case ENUM:
        // Cassandra has no dedicated fixed/enum types; both map to text.
        builder.append("text");
        break;
      case UNION:
        // Use the first non-null union branch; a record branch already present
        // in the builder means a recursive mapping, which is unsupported.
        for (Schema unionElementSchema : schema.getTypes()) {
          if (unionElementSchema.getType().equals(Schema.Type.RECORD)) {
            String recordName = unionElementSchema.getName();
            if (!builder.toString().contains(recordName)) {
              builder.append("frozen<").append(recordName).append(">");
            } else {
              LOG.warn("Same Field Type can't be mapped recursively. This is not supported with Cassandra UDT types, Please use byte dataType for recursive mapping.");
              throw new Exception("Same Field Type has mapped recursively");
            }
            break;
          } else if (!unionElementSchema.getType().equals(Schema.Type.NULL)) {
            populateFieldsToQuery(unionElementSchema, builder);
            break;
          }
        }
        break;
    }
  }

  /**
   * Renders "fieldName cqlType" pairs for every field of the record schema.
   * A field whose type cannot be mapped is rolled back cleanly (including its
   * separator and any partially rendered type text) and skipped.
   */
  static void processRecord(Schema recordSchema, StringBuilder stringBuilder) {
    boolean isCommaNeeded = false;
    for (Schema.Field field : recordSchema.getFields()) {
      // Remember the builder length so a failed field can be rolled back
      // completely; the previous indexOf-based cleanup could delete an earlier
      // occurrence of the name and left partial type text behind.
      int mark = stringBuilder.length();
      if (isCommaNeeded) {
        stringBuilder.append(", ");
      }
      stringBuilder.append(field.name()).append(" ");
      try {
        populateFieldsToQuery(field.schema(), stringBuilder);
        isCommaNeeded = true;
      } catch (Exception e) {
        stringBuilder.setLength(mark);
      }
    }
  }

  /**
   * Builds a "CREATE TYPE IF NOT EXISTS" statement for a driver-mapped UDT
   * class (Native Serialization), reading column names from the driver's
   * {@code @Field} annotations.
   *
   * @throws NoSuchFieldException when {@code fieldName} does not exist on the persistent class
   */
  static String getCreateUDTTypeForNative(CassandraMapping mapping, Class persistentClass, String udtType, String fieldName) throws NoSuchFieldException {
    Class udtClass = persistentClass.getDeclaredField(fieldName).getType();
    UDT annotation = (UDT) udtClass.getAnnotation(UDT.class);
    if (annotation == null) {
      // Previously this threw RuntimeException("") with no diagnostic at all.
      throw new RuntimeException("The type " + udtClass.getName() + " of field " + fieldName
              + " is not annotated with @UDT, can't create a Cassandra UDT for it.");
    }
    StringBuilder query = new StringBuilder();
    query.append("CREATE TYPE IF NOT EXISTS ").append(mapping.getKeySpace().getName()).append(".").append(udtType).append(" (");
    boolean isCommaNeeded = false;
    for (java.lang.reflect.Field udtField : udtClass.getDeclaredFields()) {
      com.datastax.driver.mapping.annotations.Field fieldAnnotation = udtField.getDeclaredAnnotation(com.datastax.driver.mapping.annotations.Field.class);
      if (fieldAnnotation != null) {
        if (isCommaNeeded) {
          query.append(", ");
        }
        // Prefer the explicit column name from the annotation, fall back to
        // the Java field name.
        if (!fieldAnnotation.name().isEmpty()) {
          query.append(fieldAnnotation.name()).append(" ");
        } else {
          query.append(udtField.getName()).append(" ");
        }
        query.append(dataType(udtField, null));
        isCommaNeeded = true;
      }
    }
    query.append(")");
    return query.toString();
  }

  /**
   * Builds a "CREATE TYPE IF NOT EXISTS" statement for an Avro record schema
   * (Avro Serialization).
   */
  static String getCreateUDTTypeForAvro(CassandraMapping mapping, String udtType, Schema fieldSchema) {
    StringBuilder query = new StringBuilder();
    query.append("CREATE TYPE IF NOT EXISTS ").append(mapping.getKeySpace().getName()).append(".").append(udtType).append(" (");
    CassandraQueryFactory.processRecord(fieldSchema, query);
    query.append(")");
    return query.toString();
  }

  /**
   * Maps a Java field or generic type onto the corresponding CQL type name,
   * recursing into Map/List/Set type arguments. Exactly one of the two
   * parameters is non-null.
   */
  private static String dataType(java.lang.reflect.Field field, Type fieldType) {
    String typeName = field != null ? field.getType().getName() : fieldType.getTypeName();
    switch (typeName) {
      case "java.lang.String":
      case "java.lang.CharSequence":
        return "text";
      case "int":
      case "java.lang.Integer":
        return "int";
      case "double":
      case "java.lang.Double":
        return "double";
      case "float":
      case "java.lang.Float":
        return "float";
      case "boolean":
      case "java.lang.Boolean":
        return "boolean";
      // Primitive long was previously unhandled and fell through to the
      // "Unsupported" exception even though java.lang.Long was supported.
      case "long":
      case "java.lang.Long":
        return "bigint";
      case "java.util.UUID":
        return "uuid";
      case "java.math.BigDecimal":
        return "decimal";
      case "java.net.InetAddress":
        return "inet";
      case "java.math.BigInteger":
        return "varint";
      case "java.nio.ByteBuffer":
        return "blob";
      default:
        break;
    }
    // Collection types: recurse into the generic type arguments.
    if (typeName.contains("Map")) {
      ParameterizedType mapType = (ParameterizedType) (field != null ? field.getGenericType() : fieldType);
      Type keyType = mapType.getActualTypeArguments()[0];
      Type valueType = mapType.getActualTypeArguments()[1];
      return "map<" + dataType(null, keyType) + "," + dataType(null, valueType) + ">";
    }
    if (typeName.contains("List")) {
      ParameterizedType listType = (ParameterizedType) (field != null ? field.getGenericType() : fieldType);
      return "list<" + dataType(null, listType.getActualTypeArguments()[0]) + ">";
    }
    if (typeName.contains("Set")) {
      ParameterizedType setType = (ParameterizedType) (field != null ? field.getGenericType() : fieldType);
      return "set<" + dataType(null, setType.getActualTypeArguments()[0]) + ">";
    }
    throw new RuntimeException("Unsupported Cassandra DataType");
  }
}
|
googleapis/google-cloud-java | 35,893 | java-vision/proto-google-cloud-vision-v1p2beta1/src/main/java/com/google/cloud/vision/v1p2beta1/Feature.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p2beta1/image_annotator.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vision.v1p2beta1;
/**
*
*
* <pre>
* The type of Google Cloud Vision API detection to perform, and the maximum
* number of results to return for that type. Multiple `Feature` objects can
* be specified in the `features` list.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p2beta1.Feature}
*/
public final class Feature extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1p2beta1.Feature)
FeatureOrBuilder {
private static final long serialVersionUID = 0L;
// Use Feature.newBuilder() to construct.
  /** Builder-based constructor; instances are created via {@code Feature.newBuilder()}. */
  private Feature(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  /** No-arg constructor initializing fields to their proto3 defaults (enum value 0, empty string). */
  private Feature() {
    type_ = 0;
    model_ = "";
  }
  // Invoked by the protobuf runtime to create fresh instances while parsing.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Feature();
  }
  /** Returns the protobuf message descriptor for {@code Feature} from the generated file descriptor. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vision.v1p2beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p2beta1_Feature_descriptor;
  }
  // Supplies the reflective field-accessor table binding descriptor fields to
  // the generated message and builder classes.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.vision.v1p2beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p2beta1_Feature_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vision.v1p2beta1.Feature.class,
            com.google.cloud.vision.v1p2beta1.Feature.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Type of Google Cloud Vision API feature to be extracted.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.vision.v1p2beta1.Feature.Type}
   */
  public enum Type implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Unspecified feature type.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Run face detection.
     * </pre>
     *
     * <code>FACE_DETECTION = 1;</code>
     */
    FACE_DETECTION(1),
    /**
     *
     *
     * <pre>
     * Run landmark detection.
     * </pre>
     *
     * <code>LANDMARK_DETECTION = 2;</code>
     */
    LANDMARK_DETECTION(2),
    /**
     *
     *
     * <pre>
     * Run logo detection.
     * </pre>
     *
     * <code>LOGO_DETECTION = 3;</code>
     */
    LOGO_DETECTION(3),
    /**
     *
     *
     * <pre>
     * Run label detection.
     * </pre>
     *
     * <code>LABEL_DETECTION = 4;</code>
     */
    LABEL_DETECTION(4),
    /**
     *
     *
     * <pre>
     * Run text detection / optical character recognition (OCR). Text detection
     * is optimized for areas of text within a larger image; if the image is
     * a document, use `DOCUMENT_TEXT_DETECTION` instead.
     * </pre>
     *
     * <code>TEXT_DETECTION = 5;</code>
     */
    TEXT_DETECTION(5),
    /**
     *
     *
     * <pre>
     * Run dense text document OCR. Takes precedence when both
     * `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
     * </pre>
     *
     * <code>DOCUMENT_TEXT_DETECTION = 11;</code>
     */
    DOCUMENT_TEXT_DETECTION(11),
    /**
     *
     *
     * <pre>
     * Run Safe Search to detect potentially unsafe
     * or undesirable content.
     * </pre>
     *
     * <code>SAFE_SEARCH_DETECTION = 6;</code>
     */
    SAFE_SEARCH_DETECTION(6),
    /**
     *
     *
     * <pre>
     * Compute a set of image properties, such as the
     * image's dominant colors.
     * </pre>
     *
     * <code>IMAGE_PROPERTIES = 7;</code>
     */
    IMAGE_PROPERTIES(7),
    /**
     *
     *
     * <pre>
     * Run crop hints.
     * </pre>
     *
     * <code>CROP_HINTS = 9;</code>
     */
    CROP_HINTS(9),
    /**
     *
     *
     * <pre>
     * Run web detection.
     * </pre>
     *
     * <code>WEB_DETECTION = 10;</code>
     */
    WEB_DETECTION(10),
    // Sentinel for wire values this generated code does not know about; it has
    // no wire number or descriptor index (getNumber()/getValueDescriptor() throw).
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * Unspecified feature type.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Run face detection.
     * </pre>
     *
     * <code>FACE_DETECTION = 1;</code>
     */
    public static final int FACE_DETECTION_VALUE = 1;
    /**
     *
     *
     * <pre>
     * Run landmark detection.
     * </pre>
     *
     * <code>LANDMARK_DETECTION = 2;</code>
     */
    public static final int LANDMARK_DETECTION_VALUE = 2;
    /**
     *
     *
     * <pre>
     * Run logo detection.
     * </pre>
     *
     * <code>LOGO_DETECTION = 3;</code>
     */
    public static final int LOGO_DETECTION_VALUE = 3;
    /**
     *
     *
     * <pre>
     * Run label detection.
     * </pre>
     *
     * <code>LABEL_DETECTION = 4;</code>
     */
    public static final int LABEL_DETECTION_VALUE = 4;
    /**
     *
     *
     * <pre>
     * Run text detection / optical character recognition (OCR). Text detection
     * is optimized for areas of text within a larger image; if the image is
     * a document, use `DOCUMENT_TEXT_DETECTION` instead.
     * </pre>
     *
     * <code>TEXT_DETECTION = 5;</code>
     */
    public static final int TEXT_DETECTION_VALUE = 5;
    /**
     *
     *
     * <pre>
     * Run dense text document OCR. Takes precedence when both
     * `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
     * </pre>
     *
     * <code>DOCUMENT_TEXT_DETECTION = 11;</code>
     */
    public static final int DOCUMENT_TEXT_DETECTION_VALUE = 11;
    /**
     *
     *
     * <pre>
     * Run Safe Search to detect potentially unsafe
     * or undesirable content.
     * </pre>
     *
     * <code>SAFE_SEARCH_DETECTION = 6;</code>
     */
    public static final int SAFE_SEARCH_DETECTION_VALUE = 6;
    /**
     *
     *
     * <pre>
     * Compute a set of image properties, such as the
     * image's dominant colors.
     * </pre>
     *
     * <code>IMAGE_PROPERTIES = 7;</code>
     */
    public static final int IMAGE_PROPERTIES_VALUE = 7;
    /**
     *
     *
     * <pre>
     * Run crop hints.
     * </pre>
     *
     * <code>CROP_HINTS = 9;</code>
     */
    public static final int CROP_HINTS_VALUE = 9;
    /**
     *
     *
     * <pre>
     * Run web detection.
     * </pre>
     *
     * <code>WEB_DETECTION = 10;</code>
     */
    public static final int WEB_DETECTION_VALUE = 10;
    // Returns the proto wire number; only valid for known constants.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Type valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    // Note: wire numbers are not contiguous — 8 is unused and
    // DOCUMENT_TEXT_DETECTION (11) was declared after TEXT_DETECTION (5).
    // Returns null (not UNRECOGNIZED) for unknown numbers.
    public static Type forNumber(int value) {
      switch (value) {
        case 0:
          return TYPE_UNSPECIFIED;
        case 1:
          return FACE_DETECTION;
        case 2:
          return LANDMARK_DETECTION;
        case 3:
          return LOGO_DETECTION;
        case 4:
          return LABEL_DETECTION;
        case 5:
          return TEXT_DETECTION;
        case 11:
          return DOCUMENT_TEXT_DETECTION;
        case 6:
          return SAFE_SEARCH_DETECTION;
        case 7:
          return IMAGE_PROPERTIES;
        case 9:
          return CROP_HINTS;
        case 10:
          return WEB_DETECTION;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Type>() {
          public Type findValueByNumber(int number) {
            return Type.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.vision.v1p2beta1.Feature.getDescriptor().getEnumTypes().get(0);
    }
    // Snapshot of the declared constants in declaration order; indexed by
    // EnumValueDescriptor.getIndex() in valueOf(desc) below.
    private static final Type[] VALUES = values();
    public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    // The proto wire number for this constant (-1 for UNRECOGNIZED).
    private final int value;
    private Type(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.cloud.vision.v1p2beta1.Feature.Type)
  }
  public static final int TYPE_FIELD_NUMBER = 1;
  // Stored as the raw wire int so values unknown to this runtime survive round-tripping.
  private int type_ = 0;
  /**
   *
   *
   * <pre>
   * The feature type.
   * </pre>
   *
   * <code>.google.cloud.vision.v1p2beta1.Feature.Type type = 1;</code>
   *
   * @return The enum numeric value on the wire for type.
   */
  @java.lang.Override
  public int getTypeValue() {
    return type_;
  }
  /**
   *
   *
   * <pre>
   * The feature type.
   * </pre>
   *
   * <code>.google.cloud.vision.v1p2beta1.Feature.Type type = 1;</code>
   *
   * @return The type.
   */
  @java.lang.Override
  public com.google.cloud.vision.v1p2beta1.Feature.Type getType() {
    com.google.cloud.vision.v1p2beta1.Feature.Type result =
        com.google.cloud.vision.v1p2beta1.Feature.Type.forNumber(type_);
    // forNumber returns null for unknown wire values; map that to UNRECOGNIZED.
    return result == null ? com.google.cloud.vision.v1p2beta1.Feature.Type.UNRECOGNIZED : result;
  }
  public static final int MAX_RESULTS_FIELD_NUMBER = 2;
  private int maxResults_ = 0;
  /**
   *
   *
   * <pre>
   * Maximum number of results of this type. Does not apply to
   * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
   * </pre>
   *
   * <code>int32 max_results = 2;</code>
   *
   * @return The maxResults.
   */
  @java.lang.Override
  public int getMaxResults() {
    return maxResults_;
  }
  public static final int MODEL_FIELD_NUMBER = 3;
  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below. volatile so the cached form is visible across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object model_ = "";
  /**
   *
   *
   * <pre>
   * Model to use for the feature.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
   * support "builtin/weekly" for the bleeding edge release updated weekly.
   * </pre>
   *
   * <code>string model = 3;</code>
   *
   * @return The model.
   */
  @java.lang.Override
  public java.lang.String getModel() {
    java.lang.Object ref = model_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode UTF-8 and cache; benign race — decoding is idempotent.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      model_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Model to use for the feature.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
   * support "builtin/weekly" for the bleeding edge release updated weekly.
   * </pre>
   *
   * <code>string model = 3;</code>
   *
   * @return The bytes for model.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getModelBytes() {
    java.lang.Object ref = model_;
    if (ref instanceof java.lang.String) {
      // Lazily encode to UTF-8 bytes and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      model_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result: -1 = not computed, 1 = initialized. (0 would mean known
  // uninitialized; this message has no required fields, so it is never set here.)
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Proto3 semantics: fields at their default value (0 enum, 0 int, empty
  // string) are skipped on the wire in both writeTo and getSerializedSize.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (type_ != com.google.cloud.vision.v1p2beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) {
      output.writeEnum(1, type_);
    }
    if (maxResults_ != 0) {
      output.writeInt32(2, maxResults_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size (-1 means not yet computed).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (type_ != com.google.cloud.vision.v1p2beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, type_);
    }
    if (maxResults_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, maxResults_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.vision.v1p2beta1.Feature)) {
      return super.equals(obj);
    }
    com.google.cloud.vision.v1p2beta1.Feature other =
        (com.google.cloud.vision.v1p2beta1.Feature) obj;
    // Field-by-field comparison, including unknown fields.
    if (type_ != other.type_) return false;
    if (getMaxResults() != other.getMaxResults()) return false;
    if (!getModel().equals(other.getModel())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; a computed hash of 0 would be recomputed each call, which is
    // harmless since the computation is deterministic.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TYPE_FIELD_NUMBER;
    hash = (53 * hash) + type_;
    hash = (37 * hash) + MAX_RESULTS_FIELD_NUMBER;
    hash = (53 * hash) + getMaxResults();
    hash = (37 * hash) + MODEL_FIELD_NUMBER;
    hash = (53 * hash) + getModel().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points; all delegate to PARSER (defined below) or the
  // GeneratedMessageV3 stream helpers.
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.vision.v1p2beta1.Feature parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.vision.v1p2beta1.Feature parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.vision.v1p2beta1.Feature prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless copy when converting the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The type of Google Cloud Vision API detection to perform, and the maximum
   * number of results to return for that type. Multiple `Feature` objects can
   * be specified in the `features` list.
   * </pre>
   *
   * Protobuf type {@code google.cloud.vision.v1p2beta1.Feature}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p2beta1.Feature)
      com.google.cloud.vision.v1p2beta1.FeatureOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.vision.v1p2beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p2beta1_Feature_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.vision.v1p2beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p2beta1_Feature_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.vision.v1p2beta1.Feature.class,
              com.google.cloud.vision.v1p2beta1.Feature.Builder.class);
    }
    // Construct using com.google.cloud.vision.v1p2beta1.Feature.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      type_ = 0;
      maxResults_ = 0;
      model_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.vision.v1p2beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p2beta1_Feature_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.vision.v1p2beta1.Feature getDefaultInstanceForType() {
      return com.google.cloud.vision.v1p2beta1.Feature.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.vision.v1p2beta1.Feature build() {
      com.google.cloud.vision.v1p2beta1.Feature result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.vision.v1p2beta1.Feature buildPartial() {
      com.google.cloud.vision.v1p2beta1.Feature result =
          new com.google.cloud.vision.v1p2beta1.Feature(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields explicitly set on this builder (per bitField0_)
    // into the freshly constructed message.
    private void buildPartial0(com.google.cloud.vision.v1p2beta1.Feature result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.type_ = type_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.maxResults_ = maxResults_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.model_ = model_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.vision.v1p2beta1.Feature) {
        return mergeFrom((com.google.cloud.vision.v1p2beta1.Feature) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merge semantics: only non-default fields of `other` overwrite this builder.
    public Builder mergeFrom(com.google.cloud.vision.v1p2beta1.Feature other) {
      if (other == com.google.cloud.vision.v1p2beta1.Feature.getDefaultInstance()) return this;
      if (other.type_ != 0) {
        setTypeValue(other.getTypeValue());
      }
      if (other.getMaxResults() != 0) {
        setMaxResults(other.getMaxResults());
      }
      if (!other.getModel().isEmpty()) {
        model_ = other.model_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: dispatches on the tag (field number << 3 | wire
    // type) for fields 1 (enum), 2 (int32), 3 (string); unknown fields are
    // preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                type_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                maxResults_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                model_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0: type, bit 1: maxResults, bit 2: model — marks fields explicitly
    // set on this builder (consumed by buildPartial0).
    private int bitField0_;
    private int type_ = 0;
    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p2beta1.Feature.Type type = 1;</code>
     *
     * @return The enum numeric value on the wire for type.
     */
    @java.lang.Override
    public int getTypeValue() {
      return type_;
    }
    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p2beta1.Feature.Type type = 1;</code>
     *
     * @param value The enum numeric value on the wire for type to set.
     * @return This builder for chaining.
     */
    public Builder setTypeValue(int value) {
      type_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p2beta1.Feature.Type type = 1;</code>
     *
     * @return The type.
     */
    @java.lang.Override
    public com.google.cloud.vision.v1p2beta1.Feature.Type getType() {
      com.google.cloud.vision.v1p2beta1.Feature.Type result =
          com.google.cloud.vision.v1p2beta1.Feature.Type.forNumber(type_);
      return result == null ? com.google.cloud.vision.v1p2beta1.Feature.Type.UNRECOGNIZED : result;
    }
    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p2beta1.Feature.Type type = 1;</code>
     *
     * @param value The type to set.
     * @return This builder for chaining.
     */
    public Builder setType(com.google.cloud.vision.v1p2beta1.Feature.Type value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      type_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p2beta1.Feature.Type type = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      type_ = 0;
      onChanged();
      return this;
    }
    private int maxResults_;
    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @return The maxResults.
     */
    @java.lang.Override
    public int getMaxResults() {
      return maxResults_;
    }
    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @param value The maxResults to set.
     * @return This builder for chaining.
     */
    public Builder setMaxResults(int value) {
      maxResults_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMaxResults() {
      bitField0_ = (bitField0_ & ~0x00000002);
      maxResults_ = 0;
      onChanged();
      return this;
    }
    // Holds either a String or a ByteString, mirroring the message field.
    private java.lang.Object model_ = "";
    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return The model.
     */
    public java.lang.String getModel() {
      java.lang.Object ref = model_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        model_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return The bytes for model.
     */
    public com.google.protobuf.ByteString getModelBytes() {
      java.lang.Object ref = model_;
      // (unqualified String resolves to java.lang.String here)
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        model_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @param value The model to set.
     * @return This builder for chaining.
     */
    public Builder setModel(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      model_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearModel() {
      model_ = getDefaultInstance().getModel();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @param value The bytes for model to set.
     * @return This builder for chaining.
     */
    public Builder setModelBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      model_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p2beta1.Feature)
  }
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1p2beta1.Feature)
  private static final com.google.cloud.vision.v1p2beta1.Feature DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1p2beta1.Feature();
  }
  public static com.google.cloud.vision.v1p2beta1.Feature getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser funnels all wire input through Builder.mergeFrom and returns a
  // partial message even on failure (attached via setUnfinishedMessage).
  private static final com.google.protobuf.Parser<Feature> PARSER =
      new com.google.protobuf.AbstractParser<Feature>() {
        @java.lang.Override
        public Feature parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<Feature> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Feature> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.vision.v1p2beta1.Feature getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/warehouse.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Response message for ViewCollectionItems.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ViewCollectionItemsResponse}
*/
public final class ViewCollectionItemsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ViewCollectionItemsResponse)
ViewCollectionItemsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ViewCollectionItemsResponse.newBuilder() to construct.
  private ViewCollectionItemsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes non-null defaults for the default instance.
  private ViewCollectionItemsResponse() {
    items_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ViewCollectionItemsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_ViewCollectionItemsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_ViewCollectionItemsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.visionai.v1.ViewCollectionItemsResponse.class,
            com.google.cloud.visionai.v1.ViewCollectionItemsResponse.Builder.class);
  }
  public static final int ITEMS_FIELD_NUMBER = 1;
  // Backing list is returned directly by the accessors below (no defensive copy).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.visionai.v1.CollectionItem> items_;
  /**
   *
   *
   * <pre>
   * The items from the specified collection.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.visionai.v1.CollectionItem> getItemsList() {
    return items_;
  }
  /**
   *
   *
   * <pre>
   * The items from the specified collection.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.visionai.v1.CollectionItemOrBuilder>
      getItemsOrBuilderList() {
    return items_;
  }
  /**
   *
   *
   * <pre>
   * The items from the specified collection.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
   */
  @java.lang.Override
  public int getItemsCount() {
    return items_.size();
  }
  /**
   *
   *
   * <pre>
   * The items from the specified collection.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.CollectionItem getItems(int index) {
    return items_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The items from the specified collection.
   * </pre>
   *
   * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.CollectionItemOrBuilder getItemsOrBuilder(int index) {
    return items_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below. volatile so the cached form is visible across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode UTF-8 and cache; benign race — decoding is idempotent.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Lazily encode to UTF-8 bytes and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < items_.size(); i++) {
output.writeMessage(1, items_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < items_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, items_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
  // Value equality over all declared fields plus unknown fields. Non-matching
  // types defer to the superclass implementation.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.visionai.v1.ViewCollectionItemsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.visionai.v1.ViewCollectionItemsResponse other =
        (com.google.cloud.visionai.v1.ViewCollectionItemsResponse) obj;

    if (!getItemsList().equals(other.getItemsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(): seeded with the descriptor hash,
  // then each set field is mixed in with field-number tags and prime
  // multipliers (the standard protoc-generated scheme).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getItemsCount() > 0) {
      hash = (37 * hash) + ITEMS_FIELD_NUMBER;
      hash = (53 * hash) + getItemsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard parseFrom/parseDelimitedFrom overloads generated for every
  // protobuf message. All delegate to PARSER (or to the GeneratedMessageV3
  // stream helpers, which wrap IOExceptions consistently).
  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // "Delimited" variants read a leading varint length prefix before the body.
  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Builders start from (a copy of) the shared default instance so that
  // untouched fields keep their default values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.visionai.v1.ViewCollectionItemsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // For the default instance an empty Builder suffices; otherwise seed the
    // Builder with this message's field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  // NOTE: generated by the protocol buffer compiler — do not hand-edit;
  // regenerate from the .proto definition instead.
  /**
   *
   *
   * <pre>
   * Response message for ViewCollectionItems.
   * </pre>
   *
   * Protobuf type {@code google.cloud.visionai.v1.ViewCollectionItemsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ViewCollectionItemsResponse)
      com.google.cloud.visionai.v1.ViewCollectionItemsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_ViewCollectionItemsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_ViewCollectionItemsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.visionai.v1.ViewCollectionItemsResponse.class,
              com.google.cloud.visionai.v1.ViewCollectionItemsResponse.Builder.class);
    }

    // Construct using com.google.cloud.visionai.v1.ViewCollectionItemsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (itemsBuilder_ == null) {
        items_ = java.util.Collections.emptyList();
      } else {
        items_ = null;
        itemsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_ViewCollectionItemsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.visionai.v1.ViewCollectionItemsResponse getDefaultInstanceForType() {
      return com.google.cloud.visionai.v1.ViewCollectionItemsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.visionai.v1.ViewCollectionItemsResponse build() {
      com.google.cloud.visionai.v1.ViewCollectionItemsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.visionai.v1.ViewCollectionItemsResponse buildPartial() {
      com.google.cloud.visionai.v1.ViewCollectionItemsResponse result =
          new com.google.cloud.visionai.v1.ViewCollectionItemsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers ownership of items_ to the message: once handed over, the list
    // is frozen (made unmodifiable) and the mutability bit is cleared.
    private void buildPartialRepeatedFields(
        com.google.cloud.visionai.v1.ViewCollectionItemsResponse result) {
      if (itemsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          items_ = java.util.Collections.unmodifiableList(items_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.items_ = items_;
      } else {
        result.items_ = itemsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.visionai.v1.ViewCollectionItemsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.visionai.v1.ViewCollectionItemsResponse) {
        return mergeFrom((com.google.cloud.visionai.v1.ViewCollectionItemsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.visionai.v1.ViewCollectionItemsResponse other) {
      if (other == com.google.cloud.visionai.v1.ViewCollectionItemsResponse.getDefaultInstance())
        return this;
      if (itemsBuilder_ == null) {
        // Plain-list mode: adopt the other message's (immutable) list when ours
        // is empty, otherwise copy its elements into our mutable list.
        if (!other.items_.isEmpty()) {
          if (items_.isEmpty()) {
            items_ = other.items_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureItemsIsMutable();
            items_.addAll(other.items_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: either reset to the other list (disposing the
        // builder when empty) or append the other message's items.
        if (!other.items_.isEmpty()) {
          if (itemsBuilder_.isEmpty()) {
            itemsBuilder_.dispose();
            itemsBuilder_ = null;
            items_ = other.items_;
            bitField0_ = (bitField0_ & ~0x00000001);
            itemsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getItemsFieldBuilder()
                    : null;
          } else {
            itemsBuilder_.addAllMessages(other.items_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.visionai.v1.CollectionItem m =
                    input.readMessage(
                        com.google.cloud.visionai.v1.CollectionItem.parser(), extensionRegistry);
                if (itemsBuilder_ == null) {
                  ensureItemsIsMutable();
                  items_.add(m);
                } else {
                  itemsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 tracks whether this Builder owns a mutable items_ list;
    // bit 0x00000002 tracks whether nextPageToken_ has been explicitly set.
    private int bitField0_;

    private java.util.List<com.google.cloud.visionai.v1.CollectionItem> items_ =
        java.util.Collections.emptyList();

    // Copy-on-write: clone the (possibly shared/immutable) list before the
    // first mutation and mark it as owned via bit 0x00000001.
    private void ensureItemsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        items_ = new java.util.ArrayList<com.google.cloud.visionai.v1.CollectionItem>(items_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.visionai.v1.CollectionItem,
            com.google.cloud.visionai.v1.CollectionItem.Builder,
            com.google.cloud.visionai.v1.CollectionItemOrBuilder>
        itemsBuilder_;

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public java.util.List<com.google.cloud.visionai.v1.CollectionItem> getItemsList() {
      if (itemsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(items_);
      } else {
        return itemsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public int getItemsCount() {
      if (itemsBuilder_ == null) {
        return items_.size();
      } else {
        return itemsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public com.google.cloud.visionai.v1.CollectionItem getItems(int index) {
      if (itemsBuilder_ == null) {
        return items_.get(index);
      } else {
        return itemsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder setItems(int index, com.google.cloud.visionai.v1.CollectionItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.set(index, value);
        onChanged();
      } else {
        itemsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder setItems(
        int index, com.google.cloud.visionai.v1.CollectionItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.set(index, builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder addItems(com.google.cloud.visionai.v1.CollectionItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.add(value);
        onChanged();
      } else {
        itemsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder addItems(int index, com.google.cloud.visionai.v1.CollectionItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.add(index, value);
        onChanged();
      } else {
        itemsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder addItems(com.google.cloud.visionai.v1.CollectionItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.add(builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder addItems(
        int index, com.google.cloud.visionai.v1.CollectionItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.add(index, builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder addAllItems(
        java.lang.Iterable<? extends com.google.cloud.visionai.v1.CollectionItem> values) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, items_);
        onChanged();
      } else {
        itemsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder clearItems() {
      if (itemsBuilder_ == null) {
        items_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        itemsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public Builder removeItems(int index) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.remove(index);
        onChanged();
      } else {
        itemsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public com.google.cloud.visionai.v1.CollectionItem.Builder getItemsBuilder(int index) {
      return getItemsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public com.google.cloud.visionai.v1.CollectionItemOrBuilder getItemsOrBuilder(int index) {
      if (itemsBuilder_ == null) {
        return items_.get(index);
      } else {
        return itemsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.visionai.v1.CollectionItemOrBuilder>
        getItemsOrBuilderList() {
      if (itemsBuilder_ != null) {
        return itemsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(items_);
      }
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public com.google.cloud.visionai.v1.CollectionItem.Builder addItemsBuilder() {
      return getItemsFieldBuilder()
          .addBuilder(com.google.cloud.visionai.v1.CollectionItem.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public com.google.cloud.visionai.v1.CollectionItem.Builder addItemsBuilder(int index) {
      return getItemsFieldBuilder()
          .addBuilder(index, com.google.cloud.visionai.v1.CollectionItem.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The items from the specified collection.
     * </pre>
     *
     * <code>repeated .google.cloud.visionai.v1.CollectionItem items = 1;</code>
     */
    public java.util.List<com.google.cloud.visionai.v1.CollectionItem.Builder>
        getItemsBuilderList() {
      return getItemsFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field from plain-list mode to
    // field-builder mode; items_ is nulled out once the builder takes over.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.visionai.v1.CollectionItem,
            com.google.cloud.visionai.v1.CollectionItem.Builder,
            com.google.cloud.visionai.v1.CollectionItemOrBuilder>
        getItemsFieldBuilder() {
      if (itemsBuilder_ == null) {
        itemsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.visionai.v1.CollectionItem,
                com.google.cloud.visionai.v1.CollectionItem.Builder,
                com.google.cloud.visionai.v1.CollectionItemOrBuilder>(
                items_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        items_ = null;
      }
      return itemsBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ViewCollectionItemsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ViewCollectionItemsResponse)
  // Singleton empty instance shared by all callers; also the seed for
  // newBuilder().
  private static final com.google.cloud.visionai.v1.ViewCollectionItemsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ViewCollectionItemsResponse();
  }

  public static com.google.cloud.visionai.v1.ViewCollectionItemsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by every parseFrom overload. Delegates to Builder.mergeFrom
  // and attaches the partially-built message to any parse failure so callers
  // can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<ViewCollectionItemsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ViewCollectionItemsResponse>() {
        @java.lang.Override
        public ViewCollectionItemsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors for the shared PARSER singleton.
  public static com.google.protobuf.Parser<ViewCollectionItemsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ViewCollectionItemsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.visionai.v1.ViewCollectionItemsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.drill.common.collections.ImmutableEntry;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.logical.StoragePluginConfig.AuthMode;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
import org.apache.drill.exec.planner.logical.StoragePlugins;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.store.PluginHandle.PluginType;
import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.FormatPlugin;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.exc.InvalidTypeIdException;
import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException;
/**
* Plugin registry. Caches plugin instances which correspond to configurations
* stored in persistent storage. Synchronizes the instances and storage.
* <p>
* Allows multiple "locators" to provide plugin classes such as the "classic"
* version for classes in the same class loader, the "system" version for
* system-defined plugins.
* <p>
 * Provides multiple layers of abstraction:
* <ul>
* <li>A plugin config/implementation pair (called a "connector" here)
* is located by</li>
* <li>A connector locator, which also provides bootstrap plugins and can
* create a plugin instance from a configuration, which are cached in</li>
* <li>The plugin cache, which holds stored, system and ad-hoc plugins. The
* stored plugins are backed by</li>
* <li>A persistent store: the file system for tests and embedded, ZK for
 * a distributed server, or</li>
* <li>An ephemeral cache for unnamed configs, such as those created by
* a table function.</li>
* </ul>
* <p>
* The idea is to push most functionality into the above abstractions,
* leaving overall coordination here.
* <p>
* Plugins themselves have multiple levels of definitions:
* <ul>
* <li>The config and plugin classes, provided by the locator.</li>
* <li>The {@link ConnectorHandle} which defines the config class and
* the locator which can create instances of that class.</li>
* <li>A config instance which is typically deserialized from JSON
* independent of the implementation class.</li>
* <li>A {@link PluginHandle} which pairs the config with a name as
* the unit that the user thinks of as a "plugin." The plugin entry
* links to the {@code ConnectorEntry} to create the instance lazily
* when first requested.</li>
* <li>The plugin class instance, which provides long-term state and
* which provides the logic for the plugin.</li>
* </ul>
*
* <h4>Concurrency</h4>
*
* Drill is a concurrent system; multiple users can attempt to add, remove
* and update plugin configurations at the same time. The only good
* solution would be to version the plugin configs. Instead, we rely on
* the fact that configs change infrequently.
* <p>
* The code syncs the in-memory cache with the persistent store on each
* access (which is actually inefficient and should be reviewed.)
* <p>
* During refresh, it could be that another thread is doing exactly
* the same thing, or even fighting us by changing the config. It is
* impossible to ensure a totally consistent answer. The goal is to
* make sure that the cache ends up agreeing with the persistent store
* as it was at some point in time.
* <p>
* The {@link StoragePluginMap} class provides in-memory synchronization of the
* name and config maps. Careful coding is needed when handling refresh
* since another thread could make the same changes.
* <p>
* Once the planner obtains a plugin, another user could come along and
* change the config for that plugin. Drill treats that change as another
* plugin: the original one continues to be used by the planner (but see
* below), while new queries use the new version.
* <p>
* Since the config on remote servers may have changed relative to the one
* this Foreman used for planning, the plan includes the plugin config
* itself (not just a reference to the config.) This works because the
* config is usually small.
*
* <h4>Ephemeral Plugins</h4>
*
* An ephemeral plugin handles table functions which create a temporary,
* unnamed configuration that is needed only for the execution of a
* single query, but which may be used across many threads. If the same
* table function is used multiple times, then the same ephemeral plugin
* will be used across queries. Ephemeral plugins are are based on the
* same connectors as stored plugins, but are not visible to the planner.
 * They will expire after some idle time, or when the cache exceeds its
 * maximum size.
* <p>
* The ephemeral store also acts as a graveyard for deleted or changed
* plugins. When removing a plugin, the old plugin is moved to ephemeral
* storage to allow running queries to locate it. Similarly, when a
* new configuration is stored, the corresponding plugin is retrieved
* from ephemeral storage, if it exists. This avoids odd cases where
* the same plugin exists in both normal and ephemeral storage.
*
* <h4>Caveats</h4>
*
* The main problem with synchronization at present is that plugins
* provide a {@code close()} method that, if used, could render the
* plugin unusable. Suppose a Cassandra plugin, say, maintains a connection
* to a server used across multiple queries and threads. Any change to
* the config immediately calls {@code close()} on the plugin, even though
* it may be in use in planning a query on another thread. Random failures
* will result.
* <p>
* The same issue can affect ephemeral plugins: if the number in the cache
* reaches the limit, the registry will start closing old ones, without
 * knowing if that plugin is actually in use.
* <p>
* The workaround is to not actually honor the {@code close()} call. Longer
* term, a reference count is needed.
*
* <h4>Error Handling</h4>
*
* Error handling needs review. Those problems that result from user actions
* should be raised as a {@code UserException}. Those that violate invariants
* as other forms of exception.
*/
public class StoragePluginRegistryImpl implements StoragePluginRegistry {
  private static final Logger logger = LoggerFactory.getLogger(StoragePluginRegistryImpl.class);

  // Registry-facing view of Drillbit services; wraps the DrillbitContext
  // passed to the constructor.
  private final PluginRegistryContext context;

  /**
   * Cache of enabled, stored plugins, as well as system and ad-hoc
   * plugins. Plugins live in the cache until Drillbit exit, or
   * (except for system plugins) explicitly removed.
   */
  private final StoragePluginMap pluginCache;

  // Schema factory bound to this registry; constructed eagerly (with a null
  // argument) in the constructor.
  private final DrillSchemaFactory schemaFactory;

  // Persistent store backing the named plugin configs (file system for
  // embedded/tests, ZooKeeper for a distributed server — see class javadoc).
  private final StoragePluginStore pluginStore;

  /**
   * Cache of unnamed plugins typically resulting from table functions.
   * Ephemeral plugins timeout after some time, or some max number of
   * plugins.
   */
  private final LoadingCache<StoragePluginConfig, PluginHandle> ephemeralPlugins;

  /**
   * Set of locators which provide connector implementations.
   */
  private final List<ConnectorLocator> locators = new ArrayList<>();

  /**
   * Map of config (as deserialized from the persistent store or UI)
   * to the connector which can instantiate a connector for that config.
   */
  private final Map<Class<? extends StoragePluginConfig>, ConnectorHandle> connectors =
      new IdentityHashMap<>();
  /**
   * Creates the registry for the given Drillbit: wires up the plugin cache,
   * connector locators, persistent store and the ephemeral-plugin cache.
   * Call {@link #init()} to complete startup.
   *
   * @param context the hosting Drillbit's context
   */
  public StoragePluginRegistryImpl(DrillbitContext context) {
    this.context = new DrillbitPluginRegistryContext(context);
    this.pluginCache = new StoragePluginMap();
    this.schemaFactory = new DrillSchemaFactory(null);
    locators.add(new ClassicConnectorLocator(this.context));
    locators.add(new SystemPluginLocator(this.context));
    this.pluginStore = new StoragePluginStoreImpl(context);
    // Ephemeral plugins expire after 24 hours of disuse, or when the cache
    // exceeds 250 entries. Evicted handles are closed by the removal
    // listener; missing entries are created on demand by the loader.
    this.ephemeralPlugins = CacheBuilder.newBuilder()
      .expireAfterAccess(24, TimeUnit.HOURS)
      .maximumSize(250)
      .removalListener(
          (RemovalListener<StoragePluginConfig, PluginHandle>) notification -> notification.getValue().close())
      .build(new CacheLoader<StoragePluginConfig, PluginHandle>() {
        @Override
        public PluginHandle load(StoragePluginConfig config) throws Exception {
          return createPluginEntry("$$ephemeral$$", config, PluginType.EPHEMERAL);
        }
      });
  }
@Override
public void init() {
locators.stream().forEach(loc -> loc.init());
try {
loadIntrinsicPlugins();
} catch (PluginException e) {
// Should only occur for a programming error
throw new IllegalStateException("Failed to load system plugins", e);
}
defineConnectors();
prepareStore();
}
private void loadIntrinsicPlugins() throws PluginException {
for (ConnectorLocator locator : locators) {
Collection<StoragePlugin> intrinsicPlugins = locator.intrinsicPlugins();
if (intrinsicPlugins == null) {
continue;
}
for (StoragePlugin sysPlugin : intrinsicPlugins) {
// Enforce lower case names. Since the name of a system plugin
// is "hard coded", we can't adjust it if it is not already
// lower case. All we can do is fail to tell the developer that
// something is wrong.
String origName = sysPlugin.getName();
String lcName = sysPlugin.getName().toLowerCase();
if (!origName.equals(lcName)) {
throw new IllegalStateException(String.format(
"Plugin names must be in lower case but system plugin name `%s` is not",
origName));
}
ConnectorHandle connector = ConnectorHandle.intrinsicConnector(locator, sysPlugin);
defineConnector(connector);
pluginCache.put(new PluginHandle(sysPlugin, connector, PluginType.INTRINSIC));
}
}
}
private void defineConnector(ConnectorHandle connector) {
ConnectorHandle prev = connectors.put(connector.configClass(), connector);
if (prev != null) {
String msg = String.format("Two connectors defined for the same config: " +
"%s -> %s and %s -> %s",
connector.configClass().getName(), connector.locator().getClass().getName(),
prev.configClass().getName(), prev.locator().getClass().getName());
logger.error(msg);
throw new IllegalStateException(msg);
}
}
private void defineConnectors() {
for (ConnectorLocator locator : locators) {
Set<Class<? extends StoragePluginConfig>> nonIntrinsicConfigs = locator.configClasses();
if (nonIntrinsicConfigs == null) {
continue;
}
for (Class<? extends StoragePluginConfig> configClass : nonIntrinsicConfigs) {
defineConnector(ConnectorHandle.configuredConnector(locator, configClass));
}
}
}
private void prepareStore() {
if (loadEnabledPlugins()) {
upgradeStore();
} else {
initStore();
}
}
private void initStore() {
logger.info("No storage plugin instances configured in persistent store, loading bootstrap configuration.");
StoragePlugins bootstrapPlugins = new StoragePlugins();
try {
for (ConnectorLocator locator : locators) {
StoragePlugins locatorPlugins = locator.bootstrapPlugins();
bootstrapPlugins.putAll(locatorPlugins);
}
} catch (IOException e) {
throw new IllegalStateException(
"Failure initializing the plugin store. Drillbit exiting.", e);
}
pluginStore.putAll(bootstrapPlugins);
locators.forEach(ConnectorLocator::onUpgrade);
}
/**
* Upgrade an existing persistent plugin config store with
* updates available from each locator.
*/
private void upgradeStore() {
StoragePlugins upgraded = new StoragePlugins();
for (ConnectorLocator locator : locators) {
StoragePlugins locatorPlugins = locator.updatedPlugins();
upgraded.putAll(locatorPlugins);
}
if (upgraded.isEmpty()) {
return;
}
for (Map.Entry<String, StoragePluginConfig> newPlugin : upgraded) {
StoragePluginConfig oldPluginConfig = getStoredConfig(newPlugin.getKey());
if (oldPluginConfig != null) {
copyPluginStatus(oldPluginConfig, newPlugin.getValue());
}
pluginStore.put(newPlugin.getKey(), newPlugin.getValue());
}
locators.forEach(ConnectorLocator::onUpgrade);
}
/**
* Identifies the enabled status for new storage plugins
* config. If this status is absent in the updater file, the status is kept
* from the configs, which are going to be updated
*
* @param oldPluginConfig
* current storage plugin config from Persistent Store or bootstrap
* config file
* @param newPluginConfig
* new storage plugin config
*/
protected static void copyPluginStatus(
StoragePluginConfig oldPluginConfig,
StoragePluginConfig newPluginConfig) {
if (!newPluginConfig.isEnabledStatusPresent()) {
boolean newStatus = oldPluginConfig != null && oldPluginConfig.isEnabled();
newPluginConfig.setEnabled(newStatus);
}
}
  /**
   * Initializes {@link #pluginCache} with currently enabled plugins
   * defined in the persistent store.
   *
   * @return {@code true} if the persistent store contained plugins
   * (and thus was initialized, and should perhaps be upgraded), or
   * {@code false} if no plugins were found and this is a new store
   * which should be initialized. Avoids the need to check persistent
   * store contents twice
   */
  private boolean loadEnabledPlugins() {
    Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginStore.load();
    int count = 0;
    while (allPlugins.hasNext()) {
      count++;
      Entry<String, StoragePluginConfig> plugin = allPlugins.next();
      String name = plugin.getKey();
      StoragePluginConfig config = plugin.getValue();
      // Disabled plugins stay out of the cache; they live only in the store.
      if (! config.isEnabled()) {
        continue;
      }
      try {
        pluginCache.put(createPluginEntry(name, config, PluginType.STORED));
      } catch (Exception e) {
        // A broken plugin must not prevent Drillbit startup: disable it
        // in the store and continue with the rest.
        logger.error("Failure while setting up StoragePlugin with name: '{}', disabling.", name, e);
        config.setEnabled(false);
        pluginStore.put(name, config);
      }
    }
    // If found at least one entry then this is an existing registry.
    return count > 0;
  }
@Override
public void put(String name, StoragePluginConfig config) throws PluginException {
name = validateName(name);
// Do not allow overwriting system plugins
// This same check is done later. However, we want to do this check
// before writing to the persistent store, which we must do before
// putting the plugin into the cache (where the second check is done.)
PluginHandle currentEntry = pluginCache.get(name);
if (currentEntry != null && currentEntry.isIntrinsic()) {
throw PluginException.systemPluginException(
"replace", name);
}
// Write to the store. We don't bother to update the cache; we could
// only update our own cache, not those of other Drillbits. We rely
// on the cache refresh mechanism to kick in when the Drillbit asks
// for the plugin instance.
pluginStore.put(name, config);
}
private String validateName(String name) throws PluginException {
if (name == null) {
throw new PluginException("Plugin name cannot be null");
}
name = name.trim().toLowerCase();
if (name.isEmpty()) {
throw new PluginException("Plugin name cannot be null");
}
return name;
}
  /**
   * Stores a plugin config after validating it: for an enabled config the
   * plugin instance must be creatable; lifecycle hooks (onEnabled /
   * onDisabled) are invoked when the enabled state flips. The persistent
   * store is always updated; a lifecycle failure is reported afterwards
   * and is NOT rolled back.
   */
  @Override
  public void validatedPut(String name, StoragePluginConfig config)
      throws PluginException {
    Exception lifecycleException = null;
    name = validateName(name);
    PluginHandle oldEntry;
    if (config.isEnabled()) {
      // Reuse a matching ephemeral plugin if one exists, else create anew.
      PluginHandle entry = restoreFromEphemeral(name, config);
      try {
        // Force instantiation: this is the validation step.
        entry.plugin();
      } catch (UserException e) {
        // Provide helpful error messages.
        throw new PluginException(e.getOriginalMessage(), e);
      } catch (Exception e) {
        throw new PluginException(String.format(
            "Invalid plugin config for '%s', "
            + "Please switch to Logs panel from the UI then check the log.", name), e);
      }
      oldEntry = pluginCache.put(entry);
      try {
        if (oldEntry == null || !oldEntry.config().isEnabled()) {
          // entry has the new plugin config attached to it so is appropriate for onEnabled
          entry.plugin().onEnabled();
        }
      } catch (Exception e) {
        // Store the exception to be thrown only once we complete the validatePut logic
        lifecycleException = e;
      }
    } else {
      oldEntry = pluginCache.remove(name);
      try {
        if (oldEntry != null && oldEntry.config().isEnabled()) {
          // oldEntry has the old plugin config attached to it so is appropriate for onDisabled
          oldEntry.plugin().onDisabled();
        }
      } catch (Exception e) {
        // Store the exception to be thrown only once we complete the validatePut logic
        lifecycleException = e;
      }
    }
    // Park any replaced live instance so running queries can still find it.
    moveToEphemeral(oldEntry);
    pluginStore.put(name, config);
    if (lifecycleException != null) {
      throw new PluginException(
          String.format(
              "A lifecycle method in plugin %s failed. The initiating plugin " +
              "config update has not been rolled back.",
              name
          ),
          lifecycleException
      );
    }
  }
@Override
public void setEnabled(String name, boolean enable) throws PluginException {
// Works only with the stored config. (Some odd persistent stores do not
// actually serialize the config; they just cache a copy.) If we change
// anything, the next request will do a resync to pick up the change.
name = validateName(name);
StoragePluginConfig config = requireStoredConfig(name);
if (config.isEnabled() == enable) {
return;
}
StoragePluginConfig copy = copyConfig(config);
copy.setEnabled(enable);
validatedPut(name, copy);
}
  /**
   * Configs are obtained from the persistent store. This method is
   * called only by the UI to edit a stored plugin; so no benefit to
   * using the cache. We also want a plugin even if it is disabled,
   * and disabled plugins do not reside in the cache.
   * <p>
   * Note that each call (depending on the store implementation)
   * may return a distinct instance of the config. The instance will
   * be equal (unless the stored version changes.) However, other
   * versions of the store may return the same instance as is in
   * the cache. So, <b>do not</b> modify the returned config.
   * To modify the config, call {@link #copyConfig(String)} instead.
   */
  @Override
  public StoragePluginConfig getStoredConfig(String name) {
    return pluginStore.get(name);
  }
  /** Returns a mutable copy of the named stored config. */
  @Override
  public StoragePluginConfig copyConfig(String name) throws PluginException {
    return copyConfig(requireStoredConfig(name));
  }
  /**
   * Like {@link #getStoredConfig(String)}, but fails rather than
   * returning null when the plugin is unknown.
   *
   * @throws PluginNotFoundException if no config is stored under the name
   */
  private StoragePluginConfig requireStoredConfig(String name) throws PluginException {
    StoragePluginConfig config = getStoredConfig(name);
    if (config == null) {
      throw new PluginNotFoundException(name);
    }
    return config;
  }
@Override
public String encode(StoragePluginConfig config) {
ObjectMapper mapper = context.mapper();
try {
return mapper.writer()
.forType(config.getClass())
.writeValueAsString(config);
} catch (IOException e) {
// We control serialization, so no errors should occur.
throw new IllegalStateException("Serialize failed", e);
}
}
@Override
public String encode(String name) throws PluginException {
return encode(requireStoredConfig(validateName(name)));
}
@Override
public StoragePluginConfig decode(String json) throws PluginEncodingException {
// We don't control the format of the input JSON, so an
// error could occur.
try {
return context.mapper().reader()
.forType(StoragePluginConfig.class)
.readValue(json);
} catch (InvalidTypeIdException | UnrecognizedPropertyException e) {
throw new PluginEncodingException(e.getMessage(), e);
} catch (IOException e) {
throw new PluginEncodingException("Failure when decoding plugin JSON", e);
}
}
@Override
public void putJson(String name, String json) throws PluginException {
validatedPut(name, decode(json));
}
@Override
public StoragePluginConfig copyConfig(StoragePluginConfig orig) {
try {
// TODO: Storage plugin configs don't define a "clone" or "copy"
// method, so use a round-trip to JSON to accomplish the same task.
return decode(encode(orig));
} catch (PluginEncodingException e) {
throw new IllegalStateException("De/serialize failed", e);
}
}
@Override
public StoragePluginConfig getDefinedConfig(String name) {
try {
name = validateName(name);
} catch (PluginException e) {
// Name is not valid, so no plugin matches the name.
return null;
}
PluginHandle entry = getEntry(name);
return entry == null ? null : entry.config();
}
// Gets a plugin with the named configuration
@Override
public StoragePlugin getPlugin(String name) throws PluginException {
try {
name = validateName(name);
} catch (PluginException e) {
// Name is not valid, so no plugin matches the name.
return null;
}
PluginHandle entry = getEntry(name);
// Lazy instantiation: the first call to plugin() creates the
// actual plugin instance.
return entry == null ? null : entry.plugin();
}
private PluginHandle getEntry(String name) {
PluginHandle plugin = pluginCache.get(name);
if (plugin != null && plugin.isIntrinsic()) {
return plugin;
}
StoragePluginConfig config = getStoredConfig(name);
if (plugin == null) {
return refresh(name, config);
} else {
return refresh(plugin, config);
}
}
// Lazy refresh for a plugin not known on this server.
private PluginHandle refresh(String name, StoragePluginConfig config) {
if (config == null || !config.isEnabled()) {
return null;
} else {
// Handles race conditions: some other thread may have just done what
// we're trying to do. Note: no need to close the new entry if
// there is a conflict: the plugin instance is created on demand
// and we've not done so.
return pluginCache.putIfAbsent(restoreFromEphemeral(name, config));
}
}
  // Lazy refresh of a plugin we think we know about. Returns the current
  // handle (possibly replaced), or null if the plugin is now gone/disabled.
  private PluginHandle refresh(PluginHandle entry, StoragePluginConfig config) {
    // Deleted or disabled in persistent storage?
    if (config == null || !config.isEnabled()) {
      // Move the old config to the ephemeral store.
      try {
        if (pluginCache.remove(entry.name()) == entry) {
          moveToEphemeral(entry);
        }
        return null;
      } catch (PluginException e) {
        // Should never occur; only if the persistent store were to
        // somehow contain an entry with the same name as a system plugin.
        throw new IllegalStateException("Plugin refresh failed", e);
      }
    }
    // Unchanged?
    if (entry.config().equals(config)) {
      return entry;
    }
    // Plugin changed. Handle race condition on replacement.
    PluginHandle newEntry = restoreFromEphemeral(entry.name(), config);
    try {
      if (pluginCache.replace(entry, newEntry)) {
        moveToEphemeral(entry);
        return newEntry;
      } else {
        // Another thread replaced the entry first; return the winner.
        return pluginCache.get(entry.name());
      }
    } catch (PluginException e) {
      // Should never occur; only if the persistent store were to
      // somehow contain an entry with the same name as a system plugin.
      throw new IllegalStateException("Plugin refresh failed", e);
    }
  }
private void refresh() {
// Iterate through the plugin instances in the persistent store adding
// any new ones and refreshing those whose configuration has changed
Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginStore.load();
while (allPlugins.hasNext()) {
Entry<String, StoragePluginConfig> plugin = allPlugins.next();
refresh(plugin.getKey(), plugin.getValue());
}
}
  /**
   * Legacy form of {@link #getPluginByConfig(StoragePluginConfig)} which
   * reports failures as {@link ExecutionSetupException}.
   */
  @Override
  public StoragePlugin getPlugin(StoragePluginConfig config) throws ExecutionSetupException {
    try {
      return getPluginByConfig(config);
    } catch (PluginException e) {
      throw translateException(e);
    }
  }
private ExecutionSetupException translateException(PluginException e) {
Throwable cause = e.getCause();
if (cause != null && cause instanceof ExecutionSetupException) {
return (ExecutionSetupException) cause;
}
return new ExecutionSetupException(e);
}
  /**
   * Resolves a plugin instance for a config. First checks the named plugin
   * cache; if no named plugin has this exact config, an unnamed ephemeral
   * plugin is created (or reused from the ephemeral cache).
   */
  @Override
  public StoragePlugin getPluginByConfig(StoragePluginConfig config) throws PluginException {
    // Try to lookup plugin by configuration
    PluginHandle plugin = pluginCache.get(config);
    if (plugin != null) {
      return plugin.plugin();
    }
    // No named plugin matches the desired configuration, let's create an
    // ephemeral storage plugin (or get one from the cache)
    try {
      return ephemeralPlugins.get(config).plugin();
    } catch (ExecutionException e) {
      // Unwrap the loading cache's wrapper exception.
      Throwable cause = e.getCause();
      if (cause instanceof PluginException) {
        throw (PluginException) cause;
      } else {
        // this shouldn't happen. here for completeness.
        throw new PluginException(
            "Failure while trying to create ephemeral plugin.", cause);
      }
    }
  }
  // This method is not thread-safe: there is no guarantee that the plugin
  // deleted is the same one the user requested: someone else could have deleted
  // the old one and added a new one of the same name.
  // TODO: Fix this
  /** Removes a plugin from the cache and the persistent store. */
  @Override
  public void remove(String name) throws PluginException {
    name = validateName(name);
    // Removing here allows us to check for system plugins
    // (pluginCache.remove rejects intrinsic entries). Any live instance is
    // parked in the ephemeral store so running queries can still locate it.
    moveToEphemeral(pluginCache.remove(name));
    // Must tell store to delete even if not known locally because
    // the store might hold a disabled version
    pluginStore.delete(name);
  }
  /**
   * If there is an ephemeral plugin of this (name, config) pair,
   * transfer that plugin out of ephemeral storage for reuse. Else
   * create a new handle.
   *
   * @param name plugin name
   * @param config plugin config
   * @return a handle for the plugin which may have been retrieved from
   * ephemeral storage
   */
  private PluginHandle restoreFromEphemeral(String name,
      StoragePluginConfig config) {
    // Benign race condition between check and invalidate.
    PluginHandle ephemeralEntry = ephemeralPlugins.getIfPresent(config);
    if (ephemeralEntry == null || !name.equalsIgnoreCase(ephemeralEntry.name())) {
      // Nothing to reuse: build a fresh handle for the stored plugin.
      return createPluginEntry(name, config, PluginType.STORED);
    } else {
      // Transfer the instance to a new handle, then invalidate the
      // cache entry. The transfer ensures that the invalidate will
      // not close the plugin instance
      PluginHandle newHandle = ephemeralEntry.transfer(PluginType.STORED);
      ephemeralPlugins.invalidate(config);
      return newHandle;
    }
  }
  /**
   * Parks a handle holding a live plugin instance in the ephemeral cache so
   * that in-flight queries can still find the (removed or replaced) plugin.
   * Handles without an instantiated plugin are simply dropped.
   */
  private void moveToEphemeral(PluginHandle handle) {
    if (handle == null) {
      return;
    }
    // No need to move if no instance.
    if (!handle.hasInstance()) {
      return;
    }
    // If already in the ephemeral store, don't replace.
    // Race condition is benign: two threads both doing the put
    // will cause the first handle to be closed when the second hits.
    if (ephemeralPlugins.getIfPresent(handle.config()) == null) {
      ephemeralPlugins.put(handle.config(), handle.transfer(PluginType.EPHEMERAL));
    } else {
      handle.close();
    }
  }
  /** Returns all stored plugin configs, enabled and disabled alike. */
  @Override
  public Map<String, StoragePluginConfig> storedConfigs() {
    return storedConfigs(PluginFilter.ALL);
  }
@Override
public Map<String, StoragePluginConfig> storedConfigs(PluginFilter filter) {
Map<String, StoragePluginConfig> result = new HashMap<>();
Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginStore.load();
while (allPlugins.hasNext()) {
Entry<String, StoragePluginConfig> plugin = allPlugins.next();
boolean include;
switch (filter) {
case ENABLED:
include = plugin.getValue().isEnabled();
break;
case DISABLED:
include = !plugin.getValue().isEnabled();
break;
case TRANSLATES_USERS:
include = plugin.getValue().getAuthMode() == AuthMode.USER_TRANSLATION
&& plugin.getValue().isEnabled();
break;
default:
include = true;
}
if (include) {
result.put(plugin.getKey(), plugin.getValue());
}
}
return result;
}
@Override
public Map<String, StoragePluginConfig> enabledConfigs() {
refresh();
Map<String, StoragePluginConfig> result = new HashMap<>();
for (PluginHandle entry : pluginCache) {
if (entry.isStored()) {
result.put(entry.name(), entry.config());
}
}
return result;
}
@Override
public void putFormatPlugin(String pluginName, String formatName,
FormatPluginConfig formatConfig) throws PluginException {
pluginName = validateName(pluginName);
formatName = validateName(formatName);
StoragePluginConfig orig = requireStoredConfig(pluginName);
if (!(orig instanceof FileSystemConfig)) {
throw new PluginException(
"Format plugins can be added only to the file system plugin: " + pluginName);
}
FileSystemConfig copy = (FileSystemConfig) copyConfig(orig);
if (formatConfig == null) {
copy.getFormats().remove(formatName);
} else {
copy.getFormats().put(formatName, formatConfig);
}
put(pluginName, copy);
}
@Override
public FormatPlugin getFormatPluginByConfig(StoragePluginConfig storageConfig,
FormatPluginConfig formatConfig) throws PluginException {
StoragePlugin storagePlugin = getPluginByConfig(storageConfig);
return storagePlugin.getFormatPlugin(formatConfig);
}
  /**
   * Legacy form of {@link #getFormatPluginByConfig} which reports failures
   * as {@link ExecutionSetupException}.
   */
  @Override
  public FormatPlugin getFormatPlugin(StoragePluginConfig storageConfig,
      FormatPluginConfig formatConfig) throws ExecutionSetupException {
    try {
      return getFormatPluginByConfig(storageConfig, formatConfig);
    } catch (PluginException e) {
      throw translateException(e);
    }
  }
  /** Returns the schema factory backed by this registry. */
  @Override
  public SchemaFactory getSchemaFactory() {
    return schemaFactory;
  }
  // TODO: Remove this: it will force plugins to be instantiated
  // unnecessarily
  // This is a bit of a hack. The planner calls this to get rules
  // for queries. If even one plugin has issues, then all queries
  // will fail, even those that don't use the invalid plugin.
  //
  // This hack may result in a delay (such as a timeout) again and
  // again as each query tries to create the plugin. The solution is
  // to disable the plugin, or fix the external system. This solution
  // is more stable than, say, marking the plugin failed since we have
  // no way to show or reset failed plugins.
  /**
   * Iterator over (name, plugin) pairs that silently skips plugins which
   * fail to instantiate.
   * <p>
   * NOTE(review): {@code next()} relies on a preceding {@code hasNext()}
   * call to advance {@code entry}; calling {@code next()} without it
   * yields a stale or null entry instead of throwing
   * {@code NoSuchElementException} — confirm callers always pair the calls.
   */
  private static class PluginIterator implements Iterator<Entry<String, StoragePlugin>> {
    private final Iterator<PluginHandle> base;
    // Last handle whose plugin instantiated successfully.
    private PluginHandle entry;
    public PluginIterator(Iterator<PluginHandle> base) {
      this.base = base;
    }
    @Override
    public boolean hasNext() {
      while (base.hasNext()) {
        entry = base.next();
        try {
          // Force instantiation; a failure means we skip this plugin.
          entry.plugin();
          return true;
        } catch (Exception e) {
          // Skip this one to avoid failing the query
        }
      }
      return false;
    }
    @Override
    public Entry<String, StoragePlugin> next() {
      return new ImmutableEntry<>(entry.name(), entry.plugin());
    }
  }
@Override
public Iterator<Entry<String, StoragePlugin>> iterator() {
refresh();
return new PluginIterator(pluginCache.iterator());
}
@Override
public synchronized void close() throws Exception {
ephemeralPlugins.invalidateAll();
pluginCache.close();
pluginStore.close();
locators.forEach(loc -> loc.close());
}
  /**
   * Creates plugin entry with the given {@code name} and configuration {@code pluginConfig}.
   * Validation for existence, disabled, etc. should have been done by the caller.
   * <p>
   * Uses the config to find the connector, then lets the connector create the plugin
   * entry. Creation of the plugin instance is deferred until first requested.
   * This should speed up Drillbit start, as long as other code only asks for the
   * plugin instance when it is actually needed to plan or execute a query (not just
   * to provide a schema.)
   *
   * @param name name of the plugin
   * @param pluginConfig plugin configuration
   * @return handle to the plugin with metadata and deferred access to
   * the plugin instance
   */
  private PluginHandle createPluginEntry(String name, StoragePluginConfig pluginConfig, PluginType type) {
    // Connectors are keyed by the config's concrete class (identity map).
    ConnectorHandle connector = connectors.get(pluginConfig.getClass());
    if (connector == null) {
      throw UserException.internalError()
          .message("No connector known for plugin configuration")
          .addContext("Plugin name", name)
          .addContext("Config class", pluginConfig.getClass().getName())
          .build(logger);
    }
    return connector.pluginEntryFor(name, pluginConfig, type);
  }
  /** Exposes the registry's JSON mapper used to (de)serialize plugin configs. */
  @Override
  public ObjectMapper mapper() {
    return context.mapper();
  }
@Override
public <T extends StoragePlugin> T resolve(
StoragePluginConfig storageConfig, Class<T> desired) {
try {
return desired.cast(getPluginByConfig(storageConfig));
} catch (PluginException|ClassCastException e) {
// Should never occur
throw new IllegalStateException(String.format(
"Unable to load stroage plugin %s for provided config " +
"class %s", desired.getName(),
storageConfig.getClass().getName()), e);
}
}
@Override
public <T extends FormatPlugin> T resolveFormat(
StoragePluginConfig storageConfig,
FormatPluginConfig formatConfig, Class<T> desired) {
try {
return desired.cast(getFormatPluginByConfig(storageConfig, formatConfig));
} catch (PluginException|ClassCastException e) {
// Should never occur
throw new IllegalStateException(String.format(
"Unable to load format plugin %s for provided plugin " +
"config class %s and format config class %s",
desired.getName(),
storageConfig.getClass().getName(),
formatConfig.getClass().getName()), e);
}
}
  /**
   * Returns the names of all currently available (cached) plugins, after
   * synchronizing the cache with the persistent store.
   */
  @Override
  public Set<String> availablePlugins() {
    refresh();
    return pluginCache.names();
  }
}
|
apache/hbase | 36,199 | hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestBasicWALEntryStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.OptionalLong;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
import org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufWALReader;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
import org.apache.hadoop.hbase.replication.WALEntryFilter;
import org.apache.hadoop.hbase.replication.regionserver.WALEntryStream.HasNext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALEditInternalHelper;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hbase.wal.WALProvider;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.Mockito;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader;
public abstract class TestBasicWALEntryStream extends WALEntryStreamTestBase {
  // Whether WAL compression is enabled for this parameterized run.
  @Parameter
  public boolean isCompressionEnabled;
  @Parameters(name = "{index}: isCompressionEnabled={0}")
  public static Iterable<Object[]> data() {
    // Run every test twice: with and without WAL compression.
    return Arrays.asList(new Object[] { false }, new Object[] { true });
  }
  @Before
  public void setUp() throws Exception {
    CONF.setBoolean(HConstants.ENABLE_WAL_COMPRESSION, isCompressionEnabled);
    initWAL();
  }
  // Asserts that another entry is immediately available, and returns it.
  private WAL.Entry next(WALEntryStream entryStream) {
    assertEquals(HasNext.YES, entryStream.hasNext());
    return entryStream.next();
  }
  /**
   * Tests basic reading of log appends across stream re-opens and a log
   * roll, verifying that position tracking makes progress each time.
   */
  @Test
  public void testAppendsWithRolls() throws Exception {
    appendToLogAndSync();
    long oldPos;
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      // There's one edit in the log, read it. Reading past it needs to throw exception
      assertEquals(HasNext.YES, entryStream.hasNext());
      WAL.Entry entry = entryStream.peek();
      assertSame(entry, entryStream.next());
      assertNotNull(entry);
      assertEquals(HasNext.RETRY, entryStream.hasNext());
      assertNull(entryStream.peek());
      assertThrows(IllegalStateException.class, () -> entryStream.next());
      oldPos = entryStream.getPosition();
    }
    appendToLogAndSync();
    try (WALEntryStream entryStream = new WALEntryStreamWithRetries(logQueue, fs, CONF, oldPos, log,
      new MetricsSource("1"), fakeWalGroupId)) {
      // Read the newly added entry, make sure we made progress
      WAL.Entry entry = next(entryStream);
      assertNotEquals(oldPos, entryStream.getPosition());
      assertNotNull(entry);
      oldPos = entryStream.getPosition();
    }
    // We rolled but we still should see the end of the first log and get that item
    appendToLogAndSync();
    log.rollWriter();
    appendToLogAndSync();
    try (WALEntryStreamWithRetries entryStream = new WALEntryStreamWithRetries(logQueue, fs, CONF,
      oldPos, log, new MetricsSource("1"), fakeWalGroupId)) {
      WAL.Entry entry = next(entryStream);
      assertNotEquals(oldPos, entryStream.getPosition());
      assertNotNull(entry);
      // next item should come from the new log
      entry = next(entryStream);
      assertNotEquals(oldPos, entryStream.getPosition());
      assertNotNull(entry);
      // no more entries to read, disable retry otherwise we will get a wait too much time error
      entryStream.disableRetry();
      assertEquals(HasNext.RETRY, entryStream.hasNext());
      oldPos = entryStream.getPosition();
    }
  }
  /**
   * Tests that if after a stream is opened, more entries come in and then the log is rolled, we
   * don't mistakenly dequeue the current log thinking we're done with it
   */
  @Test
  public void testLogRollWhileStreaming() throws Exception {
    appendToLog("1");
    // 2
    appendToLog("2");
    try (WALEntryStreamWithRetries entryStream = new WALEntryStreamWithRetries(logQueue, fs, CONF,
      0, log, new MetricsSource("1"), fakeWalGroupId)) {
      assertEquals("1", getRow(next(entryStream)));
      // 3 - comes in after reader opened
      appendToLog("3");
      // log roll happening while we're reading
      log.rollWriter();
      // 4 - this append is in the rolled log
      appendToLog("4");
      assertEquals("2", getRow(next(entryStream)));
      // we should not have dequeued yet since there's still an entry in first log
      assertEquals(2, getQueue().size());
      // if implemented improperly, this would be 4 and 3 would be skipped
      assertEquals("3", getRow(next(entryStream)));
      // 4
      assertEquals("4", getRow(next(entryStream)));
      // now we've dequeued and moved on to next log properly
      assertEquals(1, getQueue().size());
      // disable so we can get the return value immediately, otherwise we will fail with wait too
      // much time...
      entryStream.disableRetry();
      assertEquals(HasNext.RETRY, entryStream.hasNext());
    }
  }
  /**
   * Tests that if writes come in while we have a stream open, we shouldn't miss them
   */
  @Test
  public void testNewEntriesWhileStreaming() throws Exception {
    appendToLog("1");
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      assertNotNull(next(entryStream)); // we've hit the end of the stream at this point
      // some new entries come in while we're streaming
      appendToLog("2");
      appendToLog("3");
      // don't see them
      assertEquals(HasNext.RETRY, entryStream.hasNext());
      // But we do if we retry next time, as the entryStream will reset the reader
      assertEquals("2", getRow(next(entryStream)));
      assertEquals("3", getRow(next(entryStream)));
      // reached the end again
      assertEquals(HasNext.RETRY, entryStream.hasNext());
    }
  }
  /**
   * Tests that a new stream opened at a previously recorded position resumes reading exactly
   * where the old stream left off.
   */
  @Test
  public void testResumeStreamingFromPosition() throws Exception {
    long lastPosition = 0;
    appendToLog("1");
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      assertNotNull(next(entryStream)); // we've hit the end of the stream at this point
      appendToLog("2");
      appendToLog("3");
      lastPosition = entryStream.getPosition();
    }
    // the next stream should pick up where we left off
    try (WALEntryStream entryStream = new WALEntryStream(logQueue, fs, CONF, lastPosition, log,
      new MetricsSource("1"), fakeWalGroupId)) {
      assertEquals("2", getRow(next(entryStream)));
      assertEquals("3", getRow(next(entryStream)));
      assertEquals(HasNext.RETRY, entryStream.hasNext()); // done
      assertEquals(1, getQueue().size());
    }
  }
  /**
   * Tests that if we stop before hitting the end of a stream, we can continue where we left off
   * using the last position.
   */
  @Test
  public void testPosition() throws Exception {
    long lastPosition = 0;
    appendEntriesToLogAndSync(3);
    // read only one element
    try (WALEntryStream entryStream = new WALEntryStream(logQueue, fs, CONF, lastPosition, log,
      new MetricsSource("1"), fakeWalGroupId)) {
      assertNotNull(next(entryStream));
      lastPosition = entryStream.getPosition();
    }
    // there should still be two more entries from where we left off
    try (WALEntryStream entryStream = new WALEntryStream(logQueue, fs, CONF, lastPosition, log,
      new MetricsSource("1"), fakeWalGroupId)) {
      assertNotNull(next(entryStream));
      assertNotNull(next(entryStream));
      assertEquals(HasNext.RETRY, entryStream.hasNext());
    }
  }
@Test
public void testEmptyStream() throws Exception {
try (WALEntryStream entryStream =
new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
assertEquals(HasNext.RETRY, entryStream.hasNext());
}
}
  /**
   * Round-trips a WALKeyImpl with extended attributes through its protobuf form and verifies the
   * key fields, attributes and replication scopes survive serialization.
   */
  @Test
  public void testWALKeySerialization() throws Exception {
    Map<String, byte[]> attributes = new HashMap<String, byte[]>();
    attributes.put("foo", Bytes.toBytes("foo-value"));
    attributes.put("bar", Bytes.toBytes("bar-value"));
    WALKeyImpl key =
      new WALKeyImpl(info.getEncodedNameAsBytes(), tableName, EnvironmentEdgeManager.currentTime(),
        new ArrayList<UUID>(), 0L, 0L, mvcc, scopes, attributes);
    Assert.assertEquals(attributes, key.getExtendedAttributes());
    // serialize to protobuf without compression, then read it back into a fresh key
    WALProtos.WALKey.Builder builder = key.getBuilder(WALCellCodec.getNoneCompressor());
    WALProtos.WALKey serializedKey = builder.build();
    WALKeyImpl deserializedKey = new WALKeyImpl();
    deserializedKey.readFieldsFromPb(serializedKey, WALCellCodec.getNoneUncompressor());
    // equals() only checks region name, sequence id and write time
    Assert.assertEquals(key, deserializedKey);
    // can't use Map.equals() because byte arrays use reference equality
    Assert.assertEquals(key.getExtendedAttributes().keySet(),
      deserializedKey.getExtendedAttributes().keySet());
    for (Map.Entry<String, byte[]> entry : deserializedKey.getExtendedAttributes().entrySet()) {
      Assert.assertArrayEquals(key.getExtendedAttribute(entry.getKey()), entry.getValue());
    }
    Assert.assertEquals(key.getReplicationScopes(), deserializedKey.getReplicationScopes());
  }
  /**
   * Builds a Mockito-mocked ReplicationSource backed by a real ReplicationSourceManager (so that
   * buffer accounting works) and a mocked global metrics source.
   * @param recovered whether the mocked source should report itself as a recovered source
   * @param conf configuration handed to the real source manager
   */
  private ReplicationSource mockReplicationSource(boolean recovered, Configuration conf)
    throws IOException {
    // real manager, but with all collaborators nulled out — only buffer bookkeeping is exercised
    ReplicationSourceManager mockSourceManager = new ReplicationSourceManager(null, null, conf,
      null, null, null, null, null, null, null, createMockGlobalMetrics());
    Server mockServer = Mockito.mock(Server.class);
    ReplicationSource source = Mockito.mock(ReplicationSource.class);
    when(source.getSourceManager()).thenReturn(mockSourceManager);
    when(source.getSourceMetrics()).thenReturn(new MetricsSource("1"));
    when(source.getWALFileLengthProvider()).thenReturn(log);
    when(source.getServer()).thenReturn(mockServer);
    when(source.isRecovered()).thenReturn(recovered);
    return source;
  }
  /**
   * Creates a mocked global metrics source whose WAL-reader-buffer gauge actually remembers the
   * last value written, so tests can assert on buffer usage.
   */
  private MetricsReplicationGlobalSourceSource createMockGlobalMetrics() {
    MetricsReplicationGlobalSourceSource globalMetrics =
      Mockito.mock(MetricsReplicationGlobalSourceSource.class);
    // back the setter/getter pair with a real AtomicLong so reads reflect prior writes
    final AtomicLong bufferUsedCounter = new AtomicLong(0);
    Mockito.doAnswer((invocationOnMock) -> {
      bufferUsedCounter.set(invocationOnMock.getArgument(0, Long.class));
      return null;
    }).when(globalMetrics).setWALReaderEditsBufferBytes(Mockito.anyLong());
    when(globalMetrics.getWALReaderEditsBufferBytes())
      .then(invocationOnMock -> bufferUsedCounter.get());
    return globalMetrics;
  }
  /**
   * Creates and starts a ReplicationSourceWALReader over the shared logQueue with a pass-through
   * filter and an always-enabled peer.
   * @param recovered whether the backing mocked source reports itself as recovered
   */
  private ReplicationSourceWALReader createReader(boolean recovered, Configuration conf)
    throws IOException {
    ReplicationSource source = mockReplicationSource(recovered, conf);
    when(source.isPeerEnabled()).thenReturn(true);
    ReplicationSourceWALReader reader = new ReplicationSourceWALReader(fs, conf, logQueue, 0,
      getDummyFilter(), source, fakeWalGroupId);
    reader.start();
    return reader;
  }
  /**
   * Like {@link #createReader(boolean, Configuration)} but with a filter that throws a retryable
   * exception the first {@code numFailures} times it is invoked.
   */
  private ReplicationSourceWALReader createReaderWithBadReplicationFilter(int numFailures,
    Configuration conf) throws IOException {
    ReplicationSource source = mockReplicationSource(false, conf);
    when(source.isPeerEnabled()).thenReturn(true);
    ReplicationSourceWALReader reader = new ReplicationSourceWALReader(fs, conf, logQueue, 0,
      getIntermittentFailingFilter(numFailures), source, fakeWalGroupId);
    reader.start();
    return reader;
  }
  /**
   * Tests that the reader batches all available entries into one WALEntryBatch with the correct
   * path, end position and row count, and that later appends arrive in a subsequent batch.
   */
  @Test
  public void testReplicationSourceWALReader() throws Exception {
    appendEntriesToLogAndSync(3);
    // get ending position
    long position;
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      for (int i = 0; i < 3; i++) {
        assertNotNull(next(entryStream));
      }
      position = entryStream.getPosition();
    }
    // start up a reader
    Path walPath = getQueue().peek();
    ReplicationSourceWALReader reader = createReader(false, CONF);
    WALEntryBatch entryBatch = reader.take();
    // should've batched up our entries
    assertNotNull(entryBatch);
    assertEquals(3, entryBatch.getWalEntries().size());
    assertEquals(position, entryBatch.getLastWalPosition());
    assertEquals(walPath, entryBatch.getLastWalPath());
    assertEquals(3, entryBatch.getNbRowKeys());
    // a new append should show up as a fresh one-entry batch
    appendToLog("foo");
    entryBatch = reader.take();
    assertEquals(1, entryBatch.getNbEntries());
    assertEquals("foo", getRow(entryBatch.getWalEntries().get(0)));
  }
  /**
   * Tests that the reader retries through a filter that fails a fixed number of times and still
   * delivers the full batch afterwards.
   */
  @Test
  public void testReplicationSourceWALReaderWithFailingFilter() throws Exception {
    appendEntriesToLogAndSync(3);
    // get ending position
    long position;
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      for (int i = 0; i < 3; i++) {
        assertNotNull(next(entryStream));
      }
      position = entryStream.getPosition();
    }
    // start up a reader
    Path walPath = getQueue().peek();
    int numFailuresInFilter = 5;
    ReplicationSourceWALReader reader =
      createReaderWithBadReplicationFilter(numFailuresInFilter, CONF);
    WALEntryBatch entryBatch = reader.take();
    // NOTE(review): FailingWALEntryFilter.numFailures() reads a static counter that is never
    // reset, so this assertion is order-dependent if another test uses the same filter class.
    assertEquals(numFailuresInFilter, FailingWALEntryFilter.numFailures());
    // should've batched up our entries
    assertNotNull(entryBatch);
    assertEquals(3, entryBatch.getWalEntries().size());
    assertEquals(position, entryBatch.getLastWalPosition());
    assertEquals(walPath, entryBatch.getLastWalPath());
    assertEquals(3, entryBatch.getNbRowKeys());
  }
  /**
   * Tests batching behavior of a recovered source across a log roll: entries are split by the
   * configured batch capacity, each finished file yields an end-of-file batch, and after the
   * last file the reader hands back the NO_MORE_DATA sentinel.
   */
  @Test
  public void testReplicationSourceWALReaderRecovered() throws Exception {
    appendEntriesToLogAndSync(10);
    Path walPath = getQueue().peek();
    log.rollWriter();
    appendEntriesToLogAndSync(5);
    log.shutdown();
    Configuration conf = new Configuration(CONF);
    // cap a batch at 10 entries so the first file fills exactly one batch
    conf.setInt("replication.source.nb.capacity", 10);
    ReplicationSourceWALReader reader = createReader(true, conf);
    WALEntryBatch batch = reader.take();
    assertEquals(walPath, batch.getLastWalPath());
    assertEquals(10, batch.getNbEntries());
    assertFalse(batch.isEndOfFile());
    // second batch for the same file is empty but flags end-of-file
    batch = reader.take();
    assertEquals(walPath, batch.getLastWalPath());
    assertEquals(0, batch.getNbEntries());
    assertTrue(batch.isEndOfFile());
    walPath = getQueue().peek();
    batch = reader.take();
    assertEquals(walPath, batch.getLastWalPath());
    assertEquals(5, batch.getNbEntries());
    assertTrue(batch.isEndOfFile());
    // recovered queue is fully drained
    assertSame(WALEntryBatch.NO_MORE_DATA, reader.take());
  }
  // Testcase for HBASE-20206: the reported WAL position must never exceed the actual file length
  // after a roll.
  @Test
  public void testReplicationSourceWALReaderWrongPosition() throws Exception {
    appendEntriesToLogAndSync(1);
    Path walPath = getQueue().peek();
    log.rollWriter();
    appendEntriesToLogAndSync(20);
    // wait until the rolled file is fully closed so its length is final
    TEST_UTIL.waitFor(5000, new ExplainingPredicate<Exception>() {

      @Override
      public boolean evaluate() throws Exception {
        return fs.getFileStatus(walPath).getLen() > 0
          && ((AbstractFSWAL<?>) log).getInflightWALCloseCount() == 0;
      }

      @Override
      public String explainFailure() throws Exception {
        return walPath + " has not been closed yet";
      }
    });
    ReplicationSourceWALReader reader = createReader(false, CONF);
    WALEntryBatch entryBatch = reader.take();
    assertEquals(walPath, entryBatch.getLastWalPath());
    long walLength = fs.getFileStatus(walPath).getLen();
    // the core HBASE-20206 assertion: position must stay within the file
    assertTrue("Position " + entryBatch.getLastWalPosition() + " is out of range, file length is "
      + walLength, entryBatch.getLastWalPosition() <= walLength);
    assertEquals(1, entryBatch.getNbEntries());
    assertTrue(entryBatch.isEndOfFile());
    Path walPath2 = getQueue().peek();
    entryBatch = reader.take();
    assertEquals(walPath2, entryBatch.getLastWalPath());
    assertEquals(20, entryBatch.getNbEntries());
    assertFalse(entryBatch.isEndOfFile());
    // roll again and confirm the second file also terminates with an end-of-file batch
    log.rollWriter();
    appendEntriesToLogAndSync(10);
    entryBatch = reader.take();
    assertEquals(walPath2, entryBatch.getLastWalPath());
    assertEquals(0, entryBatch.getNbEntries());
    assertTrue(entryBatch.isEndOfFile());
    Path walPath3 = getQueue().peek();
    entryBatch = reader.take();
    assertEquals(walPath3, entryBatch.getLastWalPath());
    assertEquals(10, entryBatch.getNbEntries());
    assertFalse(entryBatch.isEndOfFile());
  }
  /**
   * Tests that a reader produces no batch while the peer is disabled, keeps polling
   * isPeerEnabled(), and delivers the pending batch once the peer is enabled.
   */
  @Test
  public void testReplicationSourceWALReaderDisabled()
    throws IOException, InterruptedException, ExecutionException {
    appendEntriesToLogAndSync(3);
    // get ending position
    long position;
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      for (int i = 0; i < 3; i++) {
        assertNotNull(next(entryStream));
      }
      position = entryStream.getPosition();
    }
    // start up a reader
    Path walPath = getQueue().peek();
    ReplicationSource source = mockReplicationSource(false, CONF);
    AtomicInteger invokeCount = new AtomicInteger(0);
    AtomicBoolean enabled = new AtomicBoolean(false);
    // count polls so we can verify the reader keeps checking the (disabled) peer
    when(source.isPeerEnabled()).then(i -> {
      invokeCount.incrementAndGet();
      return enabled.get();
    });

    ReplicationSourceWALReader reader = new ReplicationSourceWALReader(fs, CONF, logQueue, 0,
      getDummyFilter(), source, fakeWalGroupId);
    reader.start();
    // take() blocks, so run it on another thread
    Future<WALEntryBatch> future = ForkJoinPool.commonPool().submit(() -> {
      return reader.take();
    });
    // make sure that the isPeerEnabled has been called several times
    TEST_UTIL.waitFor(30000, () -> invokeCount.get() >= 5);
    // confirm that we can read nothing if the peer is disabled
    assertFalse(future.isDone());
    // then enable the peer, we should get the batch
    enabled.set(true);
    WALEntryBatch entryBatch = future.get();
    // should've batched up our entries
    assertNotNull(entryBatch);
    assertEquals(3, entryBatch.getWalEntries().size());
    assertEquals(position, entryBatch.getLastWalPosition());
    assertEquals(walPath, entryBatch.getLastWalPath());
    assertEquals(3, entryBatch.getNbRowKeys());
  }
private String getRow(WAL.Entry entry) {
Cell cell = entry.getEdit().getCells().get(0);
return Bytes.toString(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
}
  /** Appends one edit with the given row key to the WAL and blocks until it is synced. */
  private void appendToLog(String key) throws IOException {
    final long txid = log.appendData(info, new WALKeyImpl(info.getEncodedNameAsBytes(), tableName,
      EnvironmentEdgeManager.currentTime(), mvcc, scopes), getWALEdit(key));
    log.sync(txid);
  }
  /**
   * Appends {@code count} entries and syncs once on the last transaction id — syncing the last
   * txid also persists all earlier appends.
   */
  private void appendEntriesToLogAndSync(int count) throws IOException {
    long txid = -1L;
    for (int i = 0; i < count; i++) {
      txid = appendToLog(1);
    }
    log.sync(txid);
  }
  /**
   * Builds a single-cell WALEdit for the given row. Note the {@code qualifier} byte array is
   * deliberately reused as the cell value — tests only ever assert on the row.
   */
  private WALEdit getWALEdit(String row) {
    WALEdit edit = new WALEdit();
    WALEditInternalHelper.addExtendedCell(edit, new KeyValue(Bytes.toBytes(row), family, qualifier,
      EnvironmentEdgeManager.currentTime(), qualifier));
    return edit;
  }
private WALEntryFilter getDummyFilter() {
return new WALEntryFilter() {
@Override
public WAL.Entry filter(WAL.Entry entry) {
return entry;
}
};
}
  /** Returns a filter that throws a retryable exception for the first N invocations. */
  private WALEntryFilter getIntermittentFailingFilter(int numFailuresInFilter) {
    return new FailingWALEntryFilter(numFailuresInFilter);
  }
  /**
   * Filter that throws {@code WALEntryFilterRetryableException} for the first
   * {@code numFailures} calls, then passes entries through unchanged.
   */
  public static class FailingWALEntryFilter implements WALEntryFilter {
    // number of failures this instance should produce before succeeding
    private int numFailures = 0;
    // NOTE(review): static, never reset — shared across instances and tests, so assertions on
    // numFailures() are test-order dependent; confirm this coupling is intended.
    private static int countFailures = 0;

    public FailingWALEntryFilter(int numFailuresInFilter) {
      numFailures = numFailuresInFilter;
    }

    @Override
    public WAL.Entry filter(WAL.Entry entry) {
      // once we have failed numFailures times, accept everything
      if (countFailures == numFailures) {
        return entry;
      }
      countFailures = countFailures + 1;
      throw new WALEntryFilterRetryableException("failing filter");
    }

    // returns the total number of failures produced so far (across all instances)
    public static int numFailures() {
      return countFailures;
    }
  }
  /**
   * Tests that the stream never reads past the committed length reported by the length provider:
   * an entry beyond that length stays invisible until the reported length grows.
   */
  @Test
  public void testReadBeyondCommittedLength() throws IOException, InterruptedException {
    appendToLog("1");
    appendToLog("2");
    long size = log.getLogFileSizeIfBeingWritten(getQueue().peek()).getAsLong();
    // report one byte less than the real size so entry 2 sits past the "committed" end
    AtomicLong fileLength = new AtomicLong(size - 1);
    try (WALEntryStream entryStream = new WALEntryStream(logQueue, fs, CONF, 0,
      p -> OptionalLong.of(fileLength.get()), new MetricsSource("1"), fakeWalGroupId)) {
      assertNotNull(next(entryStream));
      // can not get log 2
      assertEquals(HasNext.RETRY, entryStream.hasNext());
      // give the stream time to (incorrectly) catch up, then re-check
      Thread.sleep(1000);
      // still can not get log 2
      assertEquals(HasNext.RETRY, entryStream.hasNext());
      // can get log 2 now
      fileLength.set(size);
      assertNotNull(next(entryStream));
      assertEquals(HasNext.RETRY, entryStream.hasNext());
    }
  }
  /**
   * Test removal of 0 length log from logQueue if the source is a recovered source and size of
   * logQueue is only 1.
   */
  @Test
  public void testEOFExceptionForRecoveredQueue() throws Exception {
    // Create a 0 length log.
    Path emptyLog = new Path("emptyLog");
    FSDataOutputStream fsdos = fs.create(emptyLog);
    fsdos.close();
    assertEquals(0, fs.getFileStatus(emptyLog).getLen());

    Configuration conf = new Configuration(CONF);
    // Override the max retries multiplier to fail fast.
    conf.setInt("replication.source.maxretriesmultiplier", 1);
    // enable automatic recovery from EOF so the empty log gets dropped
    conf.setBoolean("replication.source.eof.autorecovery", true);
    conf.setInt("replication.source.nb.batches", 10);
    // Create a reader thread with source as recovered source.
    ReplicationSource source = mockReplicationSource(true, conf);
    when(source.isPeerEnabled()).thenReturn(true);
    MetricsSource metrics = mock(MetricsSource.class);
    doNothing().when(metrics).incrSizeOfLogQueue();
    doNothing().when(metrics).decrSizeOfLogQueue();
    ReplicationSourceLogQueue localLogQueue = new ReplicationSourceLogQueue(conf, metrics, source);
    localLogQueue.enqueueLog(emptyLog, fakeWalGroupId);
    ReplicationSourceWALReader reader = new ReplicationSourceWALReader(fs, conf, localLogQueue, 0,
      getDummyFilter(), source, fakeWalGroupId);
    reader.start();
    reader.join();
    // ReplicationSourceWALReaderThread#handleEofException method will
    // remove empty log from logQueue.
    assertEquals(0, localLogQueue.getQueueSize(fakeWalGroupId));
  }
@Test
public void testEOFExceptionForRecoveredQueueWithMultipleLogs() throws Exception {
Configuration conf = new Configuration(CONF);
MetricsSource metrics = mock(MetricsSource.class);
ReplicationSource source = mockReplicationSource(true, conf);
ReplicationSourceLogQueue localLogQueue = new ReplicationSourceLogQueue(conf, metrics, source);
// Create a 0 length log.
Path emptyLog = new Path(fs.getHomeDirectory(), "log.2." + isCompressionEnabled);
fs.create(emptyLog).close();
assertEquals(0, fs.getFileStatus(emptyLog).getLen());
localLogQueue.enqueueLog(emptyLog, fakeWalGroupId);
final Path log1 = new Path(fs.getHomeDirectory(), "log.1." + isCompressionEnabled);
WALProvider.Writer writer1 = WALFactory.createWALWriter(fs, log1, TEST_UTIL.getConfiguration());
appendEntries(writer1, 3);
localLogQueue.enqueueLog(log1, fakeWalGroupId);
when(source.isPeerEnabled()).thenReturn(true);
// Override the max retries multiplier to fail fast.
conf.setInt("replication.source.maxretriesmultiplier", 1);
conf.setBoolean("replication.source.eof.autorecovery", true);
conf.setInt("replication.source.nb.batches", 10);
// Create a reader thread.
ReplicationSourceWALReader reader = new ReplicationSourceWALReader(fs, conf, localLogQueue, 0,
getDummyFilter(), source, fakeWalGroupId);
assertEquals("Initial log queue size is not correct", 2,
localLogQueue.getQueueSize(fakeWalGroupId));
reader.start();
reader.join();
// remove empty log from logQueue.
assertEquals(0, localLogQueue.getQueueSize(fakeWalGroupId));
assertEquals("Log queue should be empty", 0, localLogQueue.getQueueSize(fakeWalGroupId));
}
  /** Returns the WAL path queue for the test's single wal group. */
  private PriorityBlockingQueue<Path> getQueue() {
    return logQueue.getQueue(fakeWalGroupId);
  }
private void appendEntries(WALProvider.Writer writer, int numEntries) throws IOException {
for (int i = 0; i < numEntries; i++) {
byte[] b = Bytes.toBytes(Integer.toString(i));
KeyValue kv = new KeyValue(b, b, b);
WALEdit edit = new WALEdit();
WALEditInternalHelper.addExtendedCell(edit, kv);
WALKeyImpl key = new WALKeyImpl(b, TableName.valueOf(b), 0, 0, HConstants.DEFAULT_CLUSTER_ID);
NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
scopes.put(b, HConstants.REPLICATION_SCOPE_GLOBAL);
writer.append(new WAL.Entry(key, edit));
writer.sync(false);
}
writer.close();
}
  /**
   * Tests size of log queue is incremented and decremented properly.
   */
  @Test
  public void testSizeOfLogQueue() throws Exception {
    // There should be always 1 log which is current wal.
    assertEquals(1, logQueue.getMetrics().getSizeOfLogQueue());
    appendToLogAndSync();

    log.rollWriter();
    // wait until the previous WAL file is cleanly closed, so later we can always see
    // RETRY_IMMEDIATELY instead of RETRY. The wait here is necessary because the closing of a WAL
    // writer is asynchronous
    TEST_UTIL.waitFor(30000, () -> fs.getClient().isFileClosed(logQueue.getQueue(fakeWalGroupId)
      .peek().makeQualified(fs.getUri(), fs.getWorkingDirectory()).toUri().getPath()));
    // After rolling there will be 2 wals in the queue
    assertEquals(2, logQueue.getMetrics().getSizeOfLogQueue());

    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, logQueue.getMetrics(), fakeWalGroupId)) {
      // There's one edit in the log, read it.
      assertNotNull(next(entryStream));
      // we've switched to the next WAL, and the previous WAL file is closed cleanly, so it is
      // RETRY_IMMEDIATELY
      assertEquals(HasNext.RETRY_IMMEDIATELY, entryStream.hasNext());
    }

    // After removing one wal, size of log queue will be 1 again.
    assertEquals(1, logQueue.getMetrics().getSizeOfLogQueue());
  }
  /**
   * Tests that wals are closed cleanly and we read the trailer when we remove wal from
   * WALEntryStream.
   */
  @Test
  public void testCleanClosedWALs() throws Exception {
    try (WALEntryStream entryStream = new WALEntryStreamWithRetries(logQueue, fs, CONF, 0, log,
      logQueue.getMetrics(), fakeWalGroupId)) {
      assertEquals(0, logQueue.getMetrics().getUncleanlyClosedWALs());
      appendToLogAndSync();
      assertNotNull(next(entryStream));
      // roll so the stream has to finish the first wal and move on to the next
      log.rollWriter();
      appendToLogAndSync();
      assertNotNull(next(entryStream));
      // the rolled wal must not be counted as uncleanly closed
      assertEquals(0, logQueue.getMetrics().getUncleanlyClosedWALs());
    }
  }
  /**
   * Tests that we handle EOFException properly if the wal has moved to oldWALs directory.
   */
  @Test
  public void testEOFExceptionInOldWALsDirectory() throws Exception {
    assertEquals(1, logQueue.getQueueSize(fakeWalGroupId));
    AbstractFSWAL<?> abstractWAL = (AbstractFSWAL<?>) log;
    Path emptyLogFile = abstractWAL.getCurrentFileName();
    log.rollWriter(true);

    // AsyncFSWAL and FSHLog both move the log from WALs to oldWALs directory asynchronously.
    // Wait for in flight wal close count to become 0. This makes sure that empty wal is moved to
    // oldWALs directory.
    Waiter.waitFor(CONF, 5000,
      (Waiter.Predicate<Exception>) () -> abstractWAL.getInflightWALCloseCount() == 0);
    // There will 2 logs in the queue.
    assertEquals(2, logQueue.getQueueSize(fakeWalGroupId));

    // Get the archived dir path for the first wal and confirm the archived copy exists.
    Path archivePath = AbstractFSWALProvider.findArchivedLog(emptyLogFile, CONF);
    assertNotNull(archivePath);
    assertTrue(fs.exists(archivePath));
    // truncate the archived wal so that reading it will hit EOF
    fs.truncate(archivePath, 0);
    // make sure the size of the wal file is 0.
    assertEquals(0, fs.getFileStatus(archivePath).getLen());

    ReplicationSource source = Mockito.mock(ReplicationSource.class);
    when(source.isPeerEnabled()).thenReturn(true);

    Configuration localConf = new Configuration(CONF);
    localConf.setInt("replication.source.maxretriesmultiplier", 1);
    localConf.setBoolean("replication.source.eof.autorecovery", true);
    // Start the reader thread.
    createReader(false, localConf);
    // Wait for the replication queue size to be 1. This means that we have handled
    // 0 length wal from oldWALs directory.
    Waiter.waitFor(localConf, 10000,
      (Waiter.Predicate<Exception>) () -> logQueue.getQueueSize(fakeWalGroupId) == 1);
  }
  /**
   * This test is for HBASE-27778, when {@link WALEntryFilter#filter} throws exception for some
   * entries in {@link WALEntryBatch},{@link ReplicationSourceWALReader#totalBufferUsed} should be
   * decreased because {@link WALEntryBatch} is not put to
   * {@link ReplicationSourceWALReader#entryBatchQueue}.
   */
  @Test
  public void testReplicationSourceWALReaderWithPartialWALEntryFailingFilter() throws Exception {
    appendEntriesToLogAndSync(3);
    // get ending position
    long position;
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      for (int i = 0; i < 3; i++) {
        assertNotNull(next(entryStream));
      }
      position = entryStream.getPosition();
    }

    Path walPath = getQueue().peek();
    int maxThrowExceptionCount = 3;

    ReplicationSource source = mockReplicationSource(false, CONF);
    when(source.isPeerEnabled()).thenReturn(true);
    // filter that passes entries until the last one of each batch, then throws a retryable
    // exception up to maxThrowExceptionCount times
    PartialWALEntryFailingWALEntryFilter walEntryFilter =
      new PartialWALEntryFailingWALEntryFilter(maxThrowExceptionCount, 3);
    ReplicationSourceWALReader reader =
      new ReplicationSourceWALReader(fs, CONF, logQueue, 0, walEntryFilter, source, fakeWalGroupId);
    reader.start();
    WALEntryBatch entryBatch = reader.take();

    assertNotNull(entryBatch);
    assertEquals(3, entryBatch.getWalEntries().size());
    // buffer accounting must match exactly the size of the delivered entries: failed partial
    // batches must have released what they reserved
    long sum = entryBatch.getWalEntries().stream()
      .mapToLong(WALEntryBatch::getEntrySizeExcludeBulkLoad).sum();
    assertEquals(position, entryBatch.getLastWalPosition());
    assertEquals(walPath, entryBatch.getLastWalPath());
    assertEquals(3, entryBatch.getNbRowKeys());
    assertEquals(sum, source.getSourceManager().getTotalBufferUsed());
    assertEquals(sum, source.getSourceManager().getGlobalMetrics().getWALReaderEditsBufferBytes());
    assertEquals(maxThrowExceptionCount, walEntryFilter.getThrowExceptionCount());
    assertNull(reader.poll(10));
  }
  // testcase for HBASE-28748: a wal that ends immediately after a complete header must not wedge
  // the stream; it should be skipped with RETRY_IMMEDIATELY.
  @Test
  public void testWALEntryStreamEOFRightAfterHeader() throws Exception {
    assertEquals(1, logQueue.getQueueSize(fakeWalGroupId));
    AbstractFSWAL<?> abstractWAL = (AbstractFSWAL<?>) log;
    Path emptyLogFile = abstractWAL.getCurrentFileName();
    log.rollWriter(true);

    // AsyncFSWAL and FSHLog both move the log from WALs to oldWALs directory asynchronously.
    // Wait for in flight wal close count to become 0. This makes sure that empty wal is moved to
    // oldWALs directory.
    Waiter.waitFor(CONF, 5000,
      (Waiter.Predicate<Exception>) () -> abstractWAL.getInflightWALCloseCount() == 0);
    // There will 2 logs in the queue.
    assertEquals(2, logQueue.getQueueSize(fakeWalGroupId));
    appendToLogAndSync();

    Path archivedEmptyLogFile = AbstractFSWALProvider.findArchivedLog(emptyLogFile, CONF);

    // capture the wal magic plus the full header from the archived file
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    bos.write(AbstractProtobufWALReader.PB_WAL_MAGIC);
    try (FSDataInputStream in = fs.open(archivedEmptyLogFile)) {
      IOUtils.skipFully(in, AbstractProtobufWALReader.PB_WAL_MAGIC.length);
      WALHeader header = WALHeader.parseDelimitedFrom(in);
      header.writeDelimitedTo(bos);
    }
    // rewrite the archived file so it contains only magic + header — EOF right after the header
    try (FSDataOutputStream out = fs.create(archivedEmptyLogFile, true)) {
      bos.writeTo(out);
    }
    try (WALEntryStream entryStream =
      new WALEntryStream(logQueue, fs, CONF, 0, log, new MetricsSource("1"), fakeWalGroupId)) {
      // the header-only wal should be skipped immediately, and the edit in the second wal read
      assertEquals(HasNext.RETRY_IMMEDIATELY, entryStream.hasNext());
      assertNotNull(next(entryStream));
    }
  }
  /**
   * Filter that accepts every entry of a batch except the last, on which it throws a retryable
   * exception — repeated up to {@code maxThrowExceptionCount} times before finally accepting it.
   * Used to simulate a batch that repeatedly fails partway through (HBASE-27778).
   */
  private static class PartialWALEntryFailingWALEntryFilter implements WALEntryFilter {
    // index of the entry currently being filtered within the batch (-1 = before first)
    private int filteredWALEntryCount = -1;
    // expected number of entries per batch
    private int walEntryCount = 0;
    // how many times we have thrown so far (-1 = never)
    private int throwExceptionCount = -1;
    // how many times to throw before finally letting the last entry through
    private int maxThrowExceptionCount;

    public PartialWALEntryFailingWALEntryFilter(int throwExceptionLimit, int walEntryCount) {
      this.maxThrowExceptionCount = throwExceptionLimit;
      this.walEntryCount = walEntryCount;
    }

    @Override
    public WAL.Entry filter(WAL.Entry entry) {
      filteredWALEntryCount++;
      // pass every entry except the last of the batch
      if (filteredWALEntryCount < walEntryCount - 1) {
        return entry;
      }
      // last entry of the batch: reset the per-batch counter, then fail until the limit is hit
      filteredWALEntryCount = -1;
      throwExceptionCount++;
      if (throwExceptionCount <= maxThrowExceptionCount - 1) {
        throw new WALEntryFilterRetryableException("failing filter");
      }
      return entry;
    }

    public int getThrowExceptionCount() {
      return throwExceptionCount;
    }
  }
}
|
googleapis/google-cloud-java | 36,064 | java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/UpdateHookRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securesourcemanager/v1/secure_source_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securesourcemanager.v1;
/**
*
*
* <pre>
* UpdateHookRequest is the request to update a hook.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.UpdateHookRequest}
*/
public final class UpdateHookRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.UpdateHookRequest)
UpdateHookRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateHookRequest.newBuilder() to construct.
private UpdateHookRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateHookRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateHookRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_UpdateHookRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_UpdateHookRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.UpdateHookRequest.class,
com.google.cloud.securesourcemanager.v1.UpdateHookRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* hook resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask.
* The special value "*" means full replacement.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* hook resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask.
* The special value "*" means full replacement.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* hook resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask.
* The special value "*" means full replacement.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int HOOK_FIELD_NUMBER = 2;
private com.google.cloud.securesourcemanager.v1.Hook hook_;
/**
*
*
* <pre>
* Required. The hook being updated.
* </pre>
*
* <code>
* .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the hook field is set.
*/
@java.lang.Override
public boolean hasHook() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The hook being updated.
* </pre>
*
* <code>
* .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The hook.
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.Hook getHook() {
return hook_ == null
? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
: hook_;
}
/**
*
*
* <pre>
* Required. The hook being updated.
* </pre>
*
* <code>
* .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.HookOrBuilder getHookOrBuilder() {
return hook_ == null
? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
: hook_;
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Generated code (do not hand-edit): proto3 messages have no required fields, so the result
    // is always true; it is memoized in memoizedIsInitialized (-1 = not computed yet).
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Generated code (do not hand-edit): each bit in bitField0_ records whether the
    // corresponding message field was explicitly set; only set fields are serialized.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getHook());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Generated code (do not hand-edit): computes and memoizes the wire size; must mirror the
    // field order and presence checks in writeTo.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getHook());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securesourcemanager.v1.UpdateHookRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securesourcemanager.v1.UpdateHookRequest other =
        (com.google.cloud.securesourcemanager.v1.UpdateHookRequest) obj;
    // Field presence must match before values are compared.
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasHook() != other.hasHook()) return false;
    if (hasHook()) {
      if (!getHook().equals(other.getHook())) return false;
    }
    // Unknown fields participate in equality so round-tripped messages compare equal.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 is the sentinel for "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasHook()) {
      hash = (37 * hash) + HOOK_FIELD_NUMBER;
      hash = (53 * hash) + getHook().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads delegate to PARSER; the
  // stream-based variants wrap IO failures via GeneratedMessageV3's helpers.
  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix first (for streamed sequences of messages).
  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a fresh builder via the default instance (keeps builder creation uniform).
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(
      com.google.cloud.securesourcemanager.v1.UpdateHookRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is already the (empty) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * UpdateHookRequest is the request to update a hook.
   * </pre>
   *
   * Protobuf type {@code google.cloud.securesourcemanager.v1.UpdateHookRequest}
   *
   * <p>Builder for {@code UpdateHookRequest}. Not thread-safe. Field presence is tracked in
   * {@code bitField0_} (bit 0x1 = update_mask, bit 0x2 = hook); sub-message fields are managed
   * either directly or through a lazily created {@code SingleFieldBuilderV3}.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.UpdateHookRequest)
      com.google.cloud.securesourcemanager.v1.UpdateHookRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_UpdateHookRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_UpdateHookRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securesourcemanager.v1.UpdateHookRequest.class,
              com.google.cloud.securesourcemanager.v1.UpdateHookRequest.Builder.class);
    }

    // Construct using com.google.cloud.securesourcemanager.v1.UpdateHookRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders only when the runtime requires it
      // (i.e. when change notifications to a parent builder are needed).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
        getHookFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      hook_ = null;
      if (hookBuilder_ != null) {
        hookBuilder_.dispose();
        hookBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_UpdateHookRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.UpdateHookRequest getDefaultInstanceForType() {
      return com.google.cloud.securesourcemanager.v1.UpdateHookRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.UpdateHookRequest build() {
      com.google.cloud.securesourcemanager.v1.UpdateHookRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.UpdateHookRequest buildPartial() {
      com.google.cloud.securesourcemanager.v1.UpdateHookRequest result =
          new com.google.cloud.securesourcemanager.v1.UpdateHookRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the result, translating the builder's
    // presence bits into the message's presence bits.
    private void buildPartial0(com.google.cloud.securesourcemanager.v1.UpdateHookRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.hook_ = hookBuilder_ == null ? hook_ : hookBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the type-specific merge when possible; otherwise fall back to the reflective merge.
      if (other instanceof com.google.cloud.securesourcemanager.v1.UpdateHookRequest) {
        return mergeFrom((com.google.cloud.securesourcemanager.v1.UpdateHookRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.securesourcemanager.v1.UpdateHookRequest other) {
      if (other == com.google.cloud.securesourcemanager.v1.UpdateHookRequest.getDefaultInstance())
        return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasHook()) {
        mergeHook(other.getHook());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (update_mask), wire type 2 (length-delimited)
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18: // field 2 (hook), wire type 2 (length-delimited)
              {
                input.readMessage(getHookFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge into the existing value only when one is already present and non-default;
        // otherwise a plain assignment suffices.
        if (((bitField0_ & 0x00000001) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Obtaining the sub-builder marks the field as set.
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * hook resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask.
     * The special value "*" means full replacement.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        // Lazily create the field builder; ownership of the value moves into it,
        // so the raw field is nulled out afterwards.
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    private com.google.cloud.securesourcemanager.v1.Hook hook_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securesourcemanager.v1.Hook,
            com.google.cloud.securesourcemanager.v1.Hook.Builder,
            com.google.cloud.securesourcemanager.v1.HookOrBuilder>
        hookBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the hook field is set.
     */
    public boolean hasHook() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The hook.
     */
    public com.google.cloud.securesourcemanager.v1.Hook getHook() {
      if (hookBuilder_ == null) {
        return hook_ == null
            ? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
            : hook_;
      } else {
        return hookBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setHook(com.google.cloud.securesourcemanager.v1.Hook value) {
      if (hookBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        hook_ = value;
      } else {
        hookBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setHook(com.google.cloud.securesourcemanager.v1.Hook.Builder builderForValue) {
      if (hookBuilder_ == null) {
        hook_ = builderForValue.build();
      } else {
        hookBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeHook(com.google.cloud.securesourcemanager.v1.Hook value) {
      if (hookBuilder_ == null) {
        // Merge only when a non-default value is already set; otherwise assign directly.
        if (((bitField0_ & 0x00000002) != 0)
            && hook_ != null
            && hook_ != com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()) {
          getHookBuilder().mergeFrom(value);
        } else {
          hook_ = value;
        }
      } else {
        hookBuilder_.mergeFrom(value);
      }
      if (hook_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearHook() {
      bitField0_ = (bitField0_ & ~0x00000002);
      hook_ = null;
      if (hookBuilder_ != null) {
        hookBuilder_.dispose();
        hookBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.securesourcemanager.v1.Hook.Builder getHookBuilder() {
      // Obtaining the sub-builder marks the field as set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getHookFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.securesourcemanager.v1.HookOrBuilder getHookOrBuilder() {
      if (hookBuilder_ != null) {
        return hookBuilder_.getMessageOrBuilder();
      } else {
        return hook_ == null
            ? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
            : hook_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The hook being updated.
     * </pre>
     *
     * <code>
     * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securesourcemanager.v1.Hook,
            com.google.cloud.securesourcemanager.v1.Hook.Builder,
            com.google.cloud.securesourcemanager.v1.HookOrBuilder>
        getHookFieldBuilder() {
      if (hookBuilder_ == null) {
        // Lazily create the field builder; ownership of the value moves into it.
        hookBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.securesourcemanager.v1.Hook,
                com.google.cloud.securesourcemanager.v1.Hook.Builder,
                com.google.cloud.securesourcemanager.v1.HookOrBuilder>(
                getHook(), getParentForChildren(), isClean());
        hook_ = null;
      }
      return hookBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.UpdateHookRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.UpdateHookRequest)
  // Singleton empty instance; also the basis for newBuilder()/getDefaultInstanceForType().
  private static final com.google.cloud.securesourcemanager.v1.UpdateHookRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.UpdateHookRequest();
  }

  public static com.google.cloud.securesourcemanager.v1.UpdateHookRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implementation: parse into a fresh builder, attaching the partially
  // built message to any thrown InvalidProtocolBufferException for diagnostics.
  private static final com.google.protobuf.Parser<UpdateHookRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateHookRequest>() {
        @java.lang.Override
        public UpdateHookRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateHookRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateHookRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.UpdateHookRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- File boundary (concatenation artifact removed): the following content is
// java-migrationcenter/proto-google-cloud-migrationcenter-v1/src/main/java/com/google/cloud/migrationcenter/v1/GenericInsight.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/migrationcenter/v1/migrationcenter.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.migrationcenter.v1;
/**
*
*
* <pre>
* A generic insight about an asset.
* </pre>
*
* Protobuf type {@code google.cloud.migrationcenter.v1.GenericInsight}
*/
public final class GenericInsight extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.migrationcenter.v1.GenericInsight)
GenericInsightOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use GenericInsight.newBuilder() to construct.
  private GenericInsight(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private GenericInsight() {
    // Initialize fields to their proto3 defaults (empty string / empty repeated field).
    defaultMessage_ = "";
    additionalInformation_ = com.google.protobuf.LazyStringArrayList.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GenericInsight();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.migrationcenter.v1.MigrationCenterProto
        .internal_static_google_cloud_migrationcenter_v1_GenericInsight_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the generated descriptor to this class and its Builder for reflective access.
    return com.google.cloud.migrationcenter.v1.MigrationCenterProto
        .internal_static_google_cloud_migrationcenter_v1_GenericInsight_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.migrationcenter.v1.GenericInsight.class,
            com.google.cloud.migrationcenter.v1.GenericInsight.Builder.class);
  }
  public static final int MESSAGE_ID_FIELD_NUMBER = 1;
  // Proto3 scalar: 0L means "unset" on the wire.
  private long messageId_ = 0L;
  /**
   *
   *
   * <pre>
   * Output only. Represents a globally unique message id for
   * this insight, can be used for localization purposes, in case message_code
   * is not yet known by the client use default_message instead.
   * </pre>
   *
   * <code>int64 message_id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The messageId.
   */
  @java.lang.Override
  public long getMessageId() {
    return messageId_;
  }
  public static final int DEFAULT_MESSAGE_FIELD_NUMBER = 2;

  // Stores either a String or a ByteString; lazily converted and cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object defaultMessage_ = "";
  /**
   *
   *
   * <pre>
   * Output only. In case message_code is not yet known by the client
   * default_message will be the message to be used instead.
   * </pre>
   *
   * <code>string default_message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The defaultMessage.
   */
  @java.lang.Override
  public java.lang.String getDefaultMessage() {
    java.lang.Object ref = defaultMessage_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access after parse: decode UTF-8 and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      defaultMessage_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. In case message_code is not yet known by the client
   * default_message will be the message to be used instead.
   * </pre>
   *
   * <code>string default_message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for defaultMessage.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDefaultMessageBytes() {
    java.lang.Object ref = defaultMessage_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode to UTF-8 and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      defaultMessage_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ADDITIONAL_INFORMATION_FIELD_NUMBER = 3;

  // Repeated string field; LazyStringArrayList defers UTF-8 decoding per element.
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList additionalInformation_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   *
   *
   * <pre>
   * Output only. Additional information about the insight, each entry can be a
   * logical entry and must make sense if it is displayed with line breaks
   * between each entry. Text can contain md style links.
   * </pre>
   *
   * <code>repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return A list containing the additionalInformation.
   */
  public com.google.protobuf.ProtocolStringList getAdditionalInformationList() {
    return additionalInformation_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Additional information about the insight, each entry can be a
   * logical entry and must make sense if it is displayed with line breaks
   * between each entry. Text can contain md style links.
   * </pre>
   *
   * <code>repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The count of additionalInformation.
   */
  public int getAdditionalInformationCount() {
    return additionalInformation_.size();
  }
  /**
   *
   *
   * <pre>
   * Output only. Additional information about the insight, each entry can be a
   * logical entry and must make sense if it is displayed with line breaks
   * between each entry. Text can contain md style links.
   * </pre>
   *
   * <code>repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @param index The index of the element to return.
   * @return The additionalInformation at the given index.
   */
  public java.lang.String getAdditionalInformation(int index) {
    return additionalInformation_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Output only. Additional information about the insight, each entry can be a
   * logical entry and must make sense if it is displayed with line breaks
   * between each entry. Text can contain md style links.
   * </pre>
   *
   * <code>repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the additionalInformation at the given index.
   */
  public com.google.protobuf.ByteString getAdditionalInformationBytes(int index) {
    return additionalInformation_.getByteString(index);
  }
  // Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2-style fields exist, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 scalars/strings are only serialized when non-default.
    if (messageId_ != 0L) {
      output.writeInt64(1, messageId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(defaultMessage_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, defaultMessage_);
    }
    for (int i = 0; i < additionalInformation_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(
          output, 3, additionalInformation_.getRaw(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is computed once and memoized (message is immutable, so this is safe).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (messageId_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, messageId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(defaultMessage_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, defaultMessage_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < additionalInformation_.size(); i++) {
        dataSize += computeStringSizeNoTag(additionalInformation_.getRaw(i));
      }
      size += dataSize;
      // One byte of tag overhead per element (field number 3 fits in a single-byte tag).
      size += 1 * getAdditionalInformationList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.migrationcenter.v1.GenericInsight)) {
return super.equals(obj);
}
com.google.cloud.migrationcenter.v1.GenericInsight other =
(com.google.cloud.migrationcenter.v1.GenericInsight) obj;
if (getMessageId() != other.getMessageId()) return false;
if (!getDefaultMessage().equals(other.getDefaultMessage())) return false;
if (!getAdditionalInformationList().equals(other.getAdditionalInformationList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + MESSAGE_ID_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getMessageId());
hash = (37 * hash) + DEFAULT_MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getDefaultMessage().hashCode();
if (getAdditionalInformationCount() > 0) {
hash = (37 * hash) + ADDITIONAL_INFORMATION_FIELD_NUMBER;
hash = (53 * hash) + getAdditionalInformationList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points. All overloads delegate to PARSER; the
  // stream-based variants route through GeneratedMessageV3 helpers that convert
  // protocol errors into InvalidProtocolBufferException.
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: toBuilder() copies this message's fields into a fresh Builder.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.migrationcenter.v1.GenericInsight prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A generic insight about an asset.
   * </pre>
   *
   * Protobuf type {@code google.cloud.migrationcenter.v1.GenericInsight}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.migrationcenter.v1.GenericInsight)
      com.google.cloud.migrationcenter.v1.GenericInsightOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_GenericInsight_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_GenericInsight_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.migrationcenter.v1.GenericInsight.class,
              com.google.cloud.migrationcenter.v1.GenericInsight.Builder.class);
    }
    // Construct using com.google.cloud.migrationcenter.v1.GenericInsight.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all "field is set" bits and restore every field to its default.
      bitField0_ = 0;
      messageId_ = 0L;
      defaultMessage_ = "";
      additionalInformation_ = com.google.protobuf.LazyStringArrayList.emptyList();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_GenericInsight_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.GenericInsight getDefaultInstanceForType() {
      return com.google.cloud.migrationcenter.v1.GenericInsight.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.GenericInsight build() {
      com.google.cloud.migrationcenter.v1.GenericInsight result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.GenericInsight buildPartial() {
      com.google.cloud.migrationcenter.v1.GenericInsight result =
          new com.google.cloud.migrationcenter.v1.GenericInsight(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set into the result message.
    // Bit 0x1 = message_id, 0x2 = default_message, 0x4 = additional_information.
    private void buildPartial0(com.google.cloud.migrationcenter.v1.GenericInsight result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.messageId_ = messageId_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.defaultMessage_ = defaultMessage_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        // Freeze the list so the built message is immutable even if this builder keeps it.
        additionalInformation_.makeImmutable();
        result.additionalInformation_ = additionalInformation_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merge; otherwise fall back to reflective merge.
      if (other instanceof com.google.cloud.migrationcenter.v1.GenericInsight) {
        return mergeFrom((com.google.cloud.migrationcenter.v1.GenericInsight) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.migrationcenter.v1.GenericInsight other) {
      if (other == com.google.cloud.migrationcenter.v1.GenericInsight.getDefaultInstance())
        return this;
      // Scalar fields: only non-default values from `other` overwrite this builder.
      if (other.getMessageId() != 0L) {
        setMessageId(other.getMessageId());
      }
      if (!other.getDefaultMessage().isEmpty()) {
        defaultMessage_ = other.defaultMessage_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      // Repeated field: share the other message's immutable list when ours is empty,
      // otherwise append (protobuf merge semantics concatenate repeated fields).
      if (!other.additionalInformation_.isEmpty()) {
        if (additionalInformation_.isEmpty()) {
          additionalInformation_ = other.additionalInformation_;
          bitField0_ |= 0x00000004;
        } else {
          ensureAdditionalInformationIsMutable();
          additionalInformation_.addAll(other.additionalInformation_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tags: 8 = field 1 varint (message_id), 18 = field 2 length-delimited
          // (default_message), 26 = field 3 length-delimited (additional_information).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                messageId_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                defaultMessage_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                java.lang.String s = input.readStringRequireUtf8();
                ensureAdditionalInformationIsMutable();
                additionalInformation_.add(s);
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private long messageId_;
    /**
     *
     *
     * <pre>
     * Output only. Represents a globally unique message id for
     * this insight, can be used for localization purposes, in case message_code
     * is not yet known by the client use default_message instead.
     * </pre>
     *
     * <code>int64 message_id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The messageId.
     */
    @java.lang.Override
    public long getMessageId() {
      return messageId_;
    }
    /**
     *
     *
     * <pre>
     * Output only. Represents a globally unique message id for
     * this insight, can be used for localization purposes, in case message_code
     * is not yet known by the client use default_message instead.
     * </pre>
     *
     * <code>int64 message_id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The messageId to set.
     * @return This builder for chaining.
     */
    public Builder setMessageId(long value) {
      messageId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Represents a globally unique message id for
     * this insight, can be used for localization purposes, in case message_code
     * is not yet known by the client use default_message instead.
     * </pre>
     *
     * <code>int64 message_id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMessageId() {
      bitField0_ = (bitField0_ & ~0x00000001);
      messageId_ = 0L;
      onChanged();
      return this;
    }
    private java.lang.Object defaultMessage_ = "";
    /**
     *
     *
     * <pre>
     * Output only. In case message_code is not yet known by the client
     * default_message will be the message to be used instead.
     * </pre>
     *
     * <code>string default_message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The defaultMessage.
     */
    public java.lang.String getDefaultMessage() {
      // Field may hold either a String or a ByteString; decode and cache lazily.
      java.lang.Object ref = defaultMessage_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        defaultMessage_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. In case message_code is not yet known by the client
     * default_message will be the message to be used instead.
     * </pre>
     *
     * <code>string default_message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for defaultMessage.
     */
    public com.google.protobuf.ByteString getDefaultMessageBytes() {
      java.lang.Object ref = defaultMessage_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        defaultMessage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. In case message_code is not yet known by the client
     * default_message will be the message to be used instead.
     * </pre>
     *
     * <code>string default_message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The defaultMessage to set.
     * @return This builder for chaining.
     */
    public Builder setDefaultMessage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      defaultMessage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. In case message_code is not yet known by the client
     * default_message will be the message to be used instead.
     * </pre>
     *
     * <code>string default_message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDefaultMessage() {
      defaultMessage_ = getDefaultInstance().getDefaultMessage();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. In case message_code is not yet known by the client
     * default_message will be the message to be used instead.
     * </pre>
     *
     * <code>string default_message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for defaultMessage to set.
     * @return This builder for chaining.
     */
    public Builder setDefaultMessageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      defaultMessage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private com.google.protobuf.LazyStringArrayList additionalInformation_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write: replaces a shared/immutable list with a private mutable copy
    // before any in-place modification, and marks the field as set.
    private void ensureAdditionalInformationIsMutable() {
      if (!additionalInformation_.isModifiable()) {
        additionalInformation_ =
            new com.google.protobuf.LazyStringArrayList(additionalInformation_);
      }
      bitField0_ |= 0x00000004;
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return A list containing the additionalInformation.
     */
    public com.google.protobuf.ProtocolStringList getAdditionalInformationList() {
      // Freeze before exposing so callers cannot mutate the builder's backing list.
      additionalInformation_.makeImmutable();
      return additionalInformation_;
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The count of additionalInformation.
     */
    public int getAdditionalInformationCount() {
      return additionalInformation_.size();
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param index The index of the element to return.
     * @return The additionalInformation at the given index.
     */
    public java.lang.String getAdditionalInformation(int index) {
      return additionalInformation_.get(index);
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the additionalInformation at the given index.
     */
    public com.google.protobuf.ByteString getAdditionalInformationBytes(int index) {
      return additionalInformation_.getByteString(index);
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param index The index to set the value at.
     * @param value The additionalInformation to set.
     * @return This builder for chaining.
     */
    public Builder setAdditionalInformation(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureAdditionalInformationIsMutable();
      additionalInformation_.set(index, value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The additionalInformation to add.
     * @return This builder for chaining.
     */
    public Builder addAdditionalInformation(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureAdditionalInformationIsMutable();
      additionalInformation_.add(value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param values The additionalInformation to add.
     * @return This builder for chaining.
     */
    public Builder addAllAdditionalInformation(java.lang.Iterable<java.lang.String> values) {
      ensureAdditionalInformationIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, additionalInformation_);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAdditionalInformation() {
      additionalInformation_ = com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
      ;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Additional information about the insight, each entry can be a
     * logical entry and must make sense if it is displayed with line breaks
     * between each entry. Text can contain md style links.
     * </pre>
     *
     * <code>
     * repeated string additional_information = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The bytes of the additionalInformation to add.
     * @return This builder for chaining.
     */
    public Builder addAdditionalInformationBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensureAdditionalInformationIsMutable();
      additionalInformation_.add(value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.migrationcenter.v1.GenericInsight)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.migrationcenter.v1.GenericInsight)
  // Singleton default instance; also serves as the prototype for newBuilder().
  private static final com.google.cloud.migrationcenter.v1.GenericInsight DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.migrationcenter.v1.GenericInsight();
  }
  public static com.google.cloud.migrationcenter.v1.GenericInsight getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: parses via a Builder and always returns buildPartial()
  // so partially-read data is preserved on failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<GenericInsight> PARSER =
      new com.google.protobuf.AbstractParser<GenericInsight>() {
        @java.lang.Override
        public GenericInsight parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<GenericInsight> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GenericInsight> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.migrationcenter.v1.GenericInsight getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== End of generated file: GenericInsight.java (google-cloud-migrationcenter) ====
// ==== Begin generated file:
// java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/TaskRetryPolicy.java
// (NOTE: the two generated sources appear concatenated here; the raw "repo | size | path"
// metadata separator was not valid Java and has been converted to this comment.) ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* Task Retry Policy is implemented on a best-effort
* basis.
* Retry delay will be exponential based on provided minimum and maximum
* backoffs. https://en.wikipedia.org/wiki/Exponential_backoff.
* Note that the delay between consecutive task restarts may not always
* precisely match the configured settings. This can happen when the
* ConnectCluster is in rebalancing state or if the ConnectCluster is
* unresponsive etc. The default values for minimum and maximum backoffs are
* 60 seconds and 30 minutes respectively.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.TaskRetryPolicy}
*/
public final class TaskRetryPolicy extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.TaskRetryPolicy)
TaskRetryPolicyOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use TaskRetryPolicy.newBuilder() to construct.
  private TaskRetryPolicy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Private no-arg constructor: instances are created only via the builder or newInstance().
  private TaskRetryPolicy() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TaskRetryPolicy();
  }
  // Descriptor/accessor-table plumbing generated from resources.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.managedkafka.v1.ResourcesProto
        .internal_static_google_cloud_managedkafka_v1_TaskRetryPolicy_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.managedkafka.v1.ResourcesProto
        .internal_static_google_cloud_managedkafka_v1_TaskRetryPolicy_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.managedkafka.v1.TaskRetryPolicy.class,
            com.google.cloud.managedkafka.v1.TaskRetryPolicy.Builder.class);
  }
  // Presence bits: 0x1 = minimum_backoff is set, 0x2 = maximum_backoff is set.
  private int bitField0_;
  public static final int MINIMUM_BACKOFF_FIELD_NUMBER = 1;
  private com.google.protobuf.Duration minimumBackoff_;
  /**
   *
   *
   * <pre>
   * Optional. The minimum amount of time to wait before retrying a failed task.
   * This sets a lower bound for the backoff delay.
   * </pre>
   *
   * <code>.google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the minimumBackoff field is set.
   */
  @java.lang.Override
  public boolean hasMinimumBackoff() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. The minimum amount of time to wait before retrying a failed task.
   * This sets a lower bound for the backoff delay.
   * </pre>
   *
   * <code>.google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The minimumBackoff.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getMinimumBackoff() {
    // Never returns null: falls back to Duration's default instance when unset.
    return minimumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : minimumBackoff_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The minimum amount of time to wait before retrying a failed task.
   * This sets a lower bound for the backoff delay.
   * </pre>
   *
   * <code>.google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getMinimumBackoffOrBuilder() {
    return minimumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : minimumBackoff_;
  }
  public static final int MAXIMUM_BACKOFF_FIELD_NUMBER = 2;
  private com.google.protobuf.Duration maximumBackoff_;
  /**
   *
   *
   * <pre>
   * Optional. The maximum amount of time to wait before retrying a failed task.
   * This sets an upper bound for the backoff delay.
   * </pre>
   *
   * <code>.google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the maximumBackoff field is set.
   */
  @java.lang.Override
  public boolean hasMaximumBackoff() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. The maximum amount of time to wait before retrying a failed task.
   * This sets an upper bound for the backoff delay.
   * </pre>
   *
   * <code>.google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The maximumBackoff.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getMaximumBackoff() {
    return maximumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : maximumBackoff_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The maximum amount of time to wait before retrying a failed task.
   * This sets an upper bound for the backoff delay.
   * </pre>
   *
   * <code>.google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getMaximumBackoffOrBuilder() {
    return maximumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : maximumBackoff_;
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Only fields with their presence bit set are written, in field-number order.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getMinimumBackoff());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getMaximumBackoff());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 in memoizedSize means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMinimumBackoff());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMaximumBackoff());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.managedkafka.v1.TaskRetryPolicy)) {
      return super.equals(obj);
    }
    com.google.cloud.managedkafka.v1.TaskRetryPolicy other =
        (com.google.cloud.managedkafka.v1.TaskRetryPolicy) obj;
    // Presence must match before comparing values (unset != set-to-default).
    if (hasMinimumBackoff() != other.hasMinimumBackoff()) return false;
    if (hasMinimumBackoff()) {
      if (!getMinimumBackoff().equals(other.getMinimumBackoff())) return false;
    }
    if (hasMaximumBackoff() != other.hasMaximumBackoff()) return false;
    if (hasMaximumBackoff()) {
      if (!getMaximumBackoff().equals(other.getMaximumBackoff())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Hash mixes the descriptor, each set field (tagged by field number), and
    // unknown fields; consistent with equals() above.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasMinimumBackoff()) {
      hash = (37 * hash) + MINIMUM_BACKOFF_FIELD_NUMBER;
      hash = (53 * hash) + getMinimumBackoff().hashCode();
    }
    if (hasMaximumBackoff()) {
      hash = (37 * hash) + MAXIMUM_BACKOFF_FIELD_NUMBER;
      hash = (53 * hash) + getMaximumBackoff().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points for every supported input type; all delegate to PARSER
  // (declared at the end of the class). The stream variants use the GeneratedMessageV3
  // helpers so IOExceptions propagate instead of being wrapped.
  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message (for streams holding several messages).
  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(com.google.cloud.managedkafka.v1.TaskRetryPolicy prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; otherwise copy this message's fields.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Task Retry Policy is implemented on a best-effort
   * basis.
   * Retry delay will be exponential based on provided minimum and maximum
   * backoffs. https://en.wikipedia.org/wiki/Exponential_backoff.
   * Note that the delay between consecutive task restarts may not always
   * precisely match the configured settings. This can happen when the
   * ConnectCluster is in rebalancing state or if the ConnectCluster is
   * unresponsive etc. The default values for minimum and maximum backoffs are
   * 60 seconds and 30 minutes respectively.
   * </pre>
   *
   * Protobuf type {@code google.cloud.managedkafka.v1.TaskRetryPolicy}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.TaskRetryPolicy)
      com.google.cloud.managedkafka.v1.TaskRetryPolicyOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_TaskRetryPolicy_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_TaskRetryPolicy_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.managedkafka.v1.TaskRetryPolicy.class,
              com.google.cloud.managedkafka.v1.TaskRetryPolicy.Builder.class);
    }

    // Construct using com.google.cloud.managedkafka.v1.TaskRetryPolicy.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Pre-creates the nested Duration field builders when alwaysUseFieldBuilders is enabled.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getMinimumBackoffFieldBuilder();
        getMaximumBackoffFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      minimumBackoff_ = null;
      if (minimumBackoffBuilder_ != null) {
        minimumBackoffBuilder_.dispose();
        minimumBackoffBuilder_ = null;
      }
      maximumBackoff_ = null;
      if (maximumBackoffBuilder_ != null) {
        maximumBackoffBuilder_.dispose();
        maximumBackoffBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_TaskRetryPolicy_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.managedkafka.v1.TaskRetryPolicy getDefaultInstanceForType() {
      return com.google.cloud.managedkafka.v1.TaskRetryPolicy.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.managedkafka.v1.TaskRetryPolicy build() {
      com.google.cloud.managedkafka.v1.TaskRetryPolicy result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.managedkafka.v1.TaskRetryPolicy buildPartial() {
      com.google.cloud.managedkafka.v1.TaskRetryPolicy result =
          new com.google.cloud.managedkafka.v1.TaskRetryPolicy(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies the set fields into the result, preferring nested builders when present.
    private void buildPartial0(com.google.cloud.managedkafka.v1.TaskRetryPolicy result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.minimumBackoff_ =
            minimumBackoffBuilder_ == null ? minimumBackoff_ : minimumBackoffBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.maximumBackoff_ =
            maximumBackoffBuilder_ == null ? maximumBackoff_ : maximumBackoffBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.managedkafka.v1.TaskRetryPolicy) {
        return mergeFrom((com.google.cloud.managedkafka.v1.TaskRetryPolicy) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.managedkafka.v1.TaskRetryPolicy other) {
      if (other == com.google.cloud.managedkafka.v1.TaskRetryPolicy.getDefaultInstance())
        return this;
      if (other.hasMinimumBackoff()) {
        mergeMinimumBackoff(other.getMinimumBackoff());
      }
      if (other.hasMaximumBackoff()) {
        mergeMaximumBackoff(other.getMaximumBackoff());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getMinimumBackoffFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getMaximumBackoffFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 = minimum_backoff set, 0x2 = maximum_backoff set.
    private int bitField0_;

    private com.google.protobuf.Duration minimumBackoff_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        minimumBackoffBuilder_;

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the minimumBackoff field is set.
     */
    public boolean hasMinimumBackoff() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The minimumBackoff.
     */
    public com.google.protobuf.Duration getMinimumBackoff() {
      if (minimumBackoffBuilder_ == null) {
        return minimumBackoff_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : minimumBackoff_;
      } else {
        return minimumBackoffBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setMinimumBackoff(com.google.protobuf.Duration value) {
      if (minimumBackoffBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        minimumBackoff_ = value;
      } else {
        minimumBackoffBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setMinimumBackoff(com.google.protobuf.Duration.Builder builderForValue) {
      if (minimumBackoffBuilder_ == null) {
        minimumBackoff_ = builderForValue.build();
      } else {
        minimumBackoffBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeMinimumBackoff(com.google.protobuf.Duration value) {
      if (minimumBackoffBuilder_ == null) {
        // Merge into an existing non-default value; otherwise replace outright.
        if (((bitField0_ & 0x00000001) != 0)
            && minimumBackoff_ != null
            && minimumBackoff_ != com.google.protobuf.Duration.getDefaultInstance()) {
          getMinimumBackoffBuilder().mergeFrom(value);
        } else {
          minimumBackoff_ = value;
        }
      } else {
        minimumBackoffBuilder_.mergeFrom(value);
      }
      if (minimumBackoff_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearMinimumBackoff() {
      bitField0_ = (bitField0_ & ~0x00000001);
      minimumBackoff_ = null;
      if (minimumBackoffBuilder_ != null) {
        minimumBackoffBuilder_.dispose();
        minimumBackoffBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.Duration.Builder getMinimumBackoffBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getMinimumBackoffFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.DurationOrBuilder getMinimumBackoffOrBuilder() {
      if (minimumBackoffBuilder_ != null) {
        return minimumBackoffBuilder_.getMessageOrBuilder();
      } else {
        return minimumBackoff_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : minimumBackoff_;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The minimum amount of time to wait before retrying a failed task.
     * This sets a lower bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getMinimumBackoffFieldBuilder() {
      // Lazily created; once the builder exists it owns the value and minimumBackoff_ is nulled.
      if (minimumBackoffBuilder_ == null) {
        minimumBackoffBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getMinimumBackoff(), getParentForChildren(), isClean());
        minimumBackoff_ = null;
      }
      return minimumBackoffBuilder_;
    }

    private com.google.protobuf.Duration maximumBackoff_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        maximumBackoffBuilder_;

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the maximumBackoff field is set.
     */
    public boolean hasMaximumBackoff() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The maximumBackoff.
     */
    public com.google.protobuf.Duration getMaximumBackoff() {
      if (maximumBackoffBuilder_ == null) {
        return maximumBackoff_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : maximumBackoff_;
      } else {
        return maximumBackoffBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setMaximumBackoff(com.google.protobuf.Duration value) {
      if (maximumBackoffBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        maximumBackoff_ = value;
      } else {
        maximumBackoffBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setMaximumBackoff(com.google.protobuf.Duration.Builder builderForValue) {
      if (maximumBackoffBuilder_ == null) {
        maximumBackoff_ = builderForValue.build();
      } else {
        maximumBackoffBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeMaximumBackoff(com.google.protobuf.Duration value) {
      if (maximumBackoffBuilder_ == null) {
        // Merge into an existing non-default value; otherwise replace outright.
        if (((bitField0_ & 0x00000002) != 0)
            && maximumBackoff_ != null
            && maximumBackoff_ != com.google.protobuf.Duration.getDefaultInstance()) {
          getMaximumBackoffBuilder().mergeFrom(value);
        } else {
          maximumBackoff_ = value;
        }
      } else {
        maximumBackoffBuilder_.mergeFrom(value);
      }
      if (maximumBackoff_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearMaximumBackoff() {
      bitField0_ = (bitField0_ & ~0x00000002);
      maximumBackoff_ = null;
      if (maximumBackoffBuilder_ != null) {
        maximumBackoffBuilder_.dispose();
        maximumBackoffBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.Duration.Builder getMaximumBackoffBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getMaximumBackoffFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.DurationOrBuilder getMaximumBackoffOrBuilder() {
      if (maximumBackoffBuilder_ != null) {
        return maximumBackoffBuilder_.getMessageOrBuilder();
      } else {
        return maximumBackoff_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : maximumBackoff_;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum amount of time to wait before retrying a failed task.
     * This sets an upper bound for the backoff delay.
     * </pre>
     *
     * <code>
     * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getMaximumBackoffFieldBuilder() {
      // Lazily created; once the builder exists it owns the value and maximumBackoff_ is nulled.
      if (maximumBackoffBuilder_ == null) {
        maximumBackoffBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getMaximumBackoff(), getParentForChildren(), isClean());
        maximumBackoff_ = null;
      }
      return maximumBackoffBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.TaskRetryPolicy)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.TaskRetryPolicy)
  // Singleton default instance; also serves as the template for newBuilder().
  private static final com.google.cloud.managedkafka.v1.TaskRetryPolicy DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.TaskRetryPolicy();
  }

  public static com.google.cloud.managedkafka.v1.TaskRetryPolicy getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser backing every parseFrom overload above; parses via a fresh Builder and
  // attaches the partially-built message to any parse failure.
  private static final com.google.protobuf.Parser<TaskRetryPolicy> PARSER =
      new com.google.protobuf.AbstractParser<TaskRetryPolicy>() {
        @java.lang.Override
        public TaskRetryPolicy parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<TaskRetryPolicy> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TaskRetryPolicy> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.managedkafka.v1.TaskRetryPolicy getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.commons.path;
import org.apache.iotdb.commons.exception.IllegalPathException;
import org.apache.tsfile.enums.TSDataType;
import org.apache.tsfile.file.metadata.IDeviceID;
import org.apache.tsfile.write.schema.MeasurementSchema;
import org.junit.Assert;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class PartialPathTest {
@Test
public void testLegalPath() throws IllegalPathException {
String[] nodes;
// empty path
PartialPath a = new PartialPath("", false);
Assert.assertEquals("", a.getFullPath());
Assert.assertEquals(0, a.getNodes().length);
// suffix path
PartialPath b = new PartialPath("s1");
Assert.assertEquals("s1", b.getFullPath());
Assert.assertEquals("s1", b.getNodes()[0]);
// normal node
PartialPath c = new PartialPath("root.sg.a");
Assert.assertEquals("root.sg.a", c.getFullPath());
nodes = new String[] {"root", "sg", "a"};
checkNodes(nodes, c.getNodes());
// quoted node
PartialPath d = new PartialPath("root.sg.`a.b`");
Assert.assertEquals("root.sg.`a.b`", d.getFullPath());
nodes = new String[] {"root", "sg", "`a.b`"};
checkNodes(nodes, d.getNodes());
PartialPath e = new PartialPath("root.sg.`a.``b`");
Assert.assertEquals("root.sg.`a.``b`", e.getFullPath());
nodes = new String[] {"root", "sg", "`a.``b`"};
checkNodes(nodes, e.getNodes());
PartialPath f = new PartialPath("root.`sg\"`.`a.``b`");
Assert.assertEquals("root.`sg\"`.`a.``b`", f.getFullPath());
nodes = new String[] {"root", "`sg\"`", "`a.``b`"};
checkNodes(nodes, f.getNodes());
PartialPath g = new PartialPath("root.sg.`a.b\\\\`");
Assert.assertEquals("root.sg.`a.b\\\\`", g.getFullPath());
nodes = new String[] {"root", "sg", "`a.b\\\\`"};
checkNodes(nodes, g.getNodes());
// quoted node of digits
PartialPath h = new PartialPath("root.sg.`111`");
Assert.assertEquals("root.sg.`111`", h.getFullPath());
nodes = new String[] {"root", "sg", "`111`"};
checkNodes(nodes, h.getNodes());
// quoted node of key word
PartialPath i = new PartialPath("root.sg.`select`");
Assert.assertEquals("root.sg.select", i.getFullPath());
nodes = new String[] {"root", "sg", "select"};
checkNodes(nodes, i.getNodes());
// wildcard
PartialPath j = new PartialPath("root.sg.`a*b`");
Assert.assertEquals("root.sg.`a*b`", j.getFullPath());
nodes = new String[] {"root", "sg", "`a*b`"};
checkNodes(nodes, j.getNodes());
PartialPath k = new PartialPath("root.sg.*");
Assert.assertEquals("root.sg.*", k.getFullPath());
nodes = new String[] {"root", "sg", "*"};
checkNodes(nodes, k.getNodes());
PartialPath l = new PartialPath("root.sg.**");
Assert.assertEquals("root.sg.**", l.getFullPath());
nodes = new String[] {"root", "sg", "**"};
checkNodes(nodes, l.getNodes());
// raw key word
PartialPath m = new PartialPath("root.sg.select");
Assert.assertEquals("root.sg.select", m.getFullPath());
nodes = new String[] {"root", "sg", "select"};
checkNodes(nodes, m.getNodes());
PartialPath n = new PartialPath("root.sg.device");
Assert.assertEquals("root.sg.device", n.getFullPath());
nodes = new String[] {"root", "sg", "device"};
checkNodes(nodes, n.getNodes());
PartialPath o = new PartialPath("root.sg.datatype");
Assert.assertEquals("root.sg.datatype", o.getFullPath());
nodes = new String[] {"root", "sg", "datatype"};
checkNodes(nodes, o.getNodes());
PartialPath r = new PartialPath("root.sg.boolean");
Assert.assertEquals("root.sg.boolean", r.getFullPath());
nodes = new String[] {"root", "sg", "boolean"};
checkNodes(nodes, r.getNodes());
PartialPath s = new PartialPath("root.sg.DROP_TRIGGER");
Assert.assertEquals("root.sg.DROP_TRIGGER", s.getFullPath());
nodes = new String[] {"root", "sg", "DROP_TRIGGER"};
checkNodes(nodes, s.getNodes());
PartialPath t = new PartialPath("root.sg.`abc`");
Assert.assertEquals("root.sg.abc", t.getFullPath());
nodes = new String[] {"root", "sg", "abc"};
checkNodes(nodes, t.getNodes());
PartialPath u = new PartialPath("root.sg.set_storage_group");
Assert.assertEquals("root.sg.set_storage_group", u.getFullPath());
nodes = new String[] {"root", "sg", "set_storage_group"};
checkNodes(nodes, u.getNodes());
}
@Test
public void testIllegalPath() {
try {
new PartialPath("root.sg.d1.```");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.d1\na");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.`d1`..`aa``b`");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.d1.`s+`-1\"`");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root..a");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.d1.");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.111");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.time");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.root");
fail();
} catch (IllegalPathException ignored) {
}
try {
new PartialPath("root.sg.timestamp");
fail();
} catch (IllegalPathException ignored) {
}
}
@Test
public void testLegalDeviceAndMeasurement() throws IllegalPathException {
String[] nodes;
// normal node
PartialPath a = new MeasurementPath("root.sg", "s1");
Assert.assertEquals("root.sg.s1", a.getFullPath());
nodes = new String[] {"root", "sg", "s1"};
checkNodes(nodes, a.getNodes());
PartialPath b = new MeasurementPath("root.sg", "s2");
Assert.assertEquals("root.sg.s2", b.getFullPath());
nodes = new String[] {"root", "sg", "s2"};
checkNodes(nodes, b.getNodes());
PartialPath c = new MeasurementPath("root.sg", "a");
Assert.assertEquals("root.sg.a", c.getFullPath());
nodes = new String[] {"root", "sg", "a"};
checkNodes(nodes, c.getNodes());
// quoted node
PartialPath d = new MeasurementPath("root.sg", "`a.b`");
Assert.assertEquals("root.sg.`a.b`", d.getFullPath());
nodes = new String[] {"root", "sg", "`a.b`"};
checkNodes(nodes, d.getNodes());
PartialPath e = new MeasurementPath("root.sg", "`a.``b`");
Assert.assertEquals("root.sg.`a.``b`", e.getFullPath());
nodes = new String[] {"root", "sg", "`a.``b`"};
checkNodes(nodes, e.getNodes());
PartialPath f = new MeasurementPath("root.`sg\"`", "`a.``b`");
Assert.assertEquals("root.`sg\"`.`a.``b`", f.getFullPath());
nodes = new String[] {"root", "`sg\"`", "`a.``b`"};
checkNodes(nodes, f.getNodes());
PartialPath g = new MeasurementPath("root.sg", "`a.b\\\\`");
Assert.assertEquals("root.sg.`a.b\\\\`", g.getFullPath());
nodes = new String[] {"root", "sg", "`a.b\\\\`"};
checkNodes(nodes, g.getNodes());
// quoted node of digits
PartialPath h = new MeasurementPath("root.sg", "`111`");
Assert.assertEquals("root.sg.`111`", h.getFullPath());
nodes = new String[] {"root", "sg", "`111`"};
checkNodes(nodes, h.getNodes());
// quoted node of key word
PartialPath i = new MeasurementPath("root.sg", "`select`");
Assert.assertEquals("root.sg.select", i.getFullPath());
nodes = new String[] {"root", "sg", "select"};
checkNodes(nodes, i.getNodes());
// wildcard
PartialPath j = new MeasurementPath("root.sg", "`a*b`");
Assert.assertEquals("root.sg.`a*b`", j.getFullPath());
nodes = new String[] {"root", "sg", "`a*b`"};
checkNodes(nodes, j.getNodes());
PartialPath k = new MeasurementPath("root.sg", "*");
Assert.assertEquals("root.sg.*", k.getFullPath());
nodes = new String[] {"root", "sg", "*"};
checkNodes(nodes, k.getNodes());
PartialPath l = new MeasurementPath("root.sg", "**");
Assert.assertEquals("root.sg.**", l.getFullPath());
nodes = new String[] {"root", "sg", "**"};
checkNodes(nodes, l.getNodes());
// other
PartialPath m = new MeasurementPath("root.sg", "`to`.be.prefix.s");
Assert.assertEquals("root.sg.to.be.prefix.s", m.getFullPath());
nodes = new String[] {"root", "sg", "to", "be", "prefix", "s"};
checkNodes(nodes, m.getNodes());
PartialPath n = new MeasurementPath("root.sg", "`abc`");
Assert.assertEquals("root.sg.abc", n.getFullPath());
nodes = new String[] {"root", "sg", "abc"};
checkNodes(nodes, n.getNodes());
}
@Test
public void testIllegalDeviceAndMeasurement() {
try {
new MeasurementPath("root.sg.d1", "```");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.d1.```", "s1");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.`d1`..a", "`aa``b`");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.`d1`.a", "s..`aa``b`");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.d1", "`s+`-1\"`");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.d1.`s+`-1\"`", "s1");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg", "111");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.111", "s1");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.select`", "a");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.d1", "device`");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.d1", "root");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.d1", "time");
fail();
} catch (IllegalPathException ignored) {
}
try {
new MeasurementPath("root.sg.d1", "timestamp");
fail();
} catch (IllegalPathException ignored) {
}
}
@Test
public void testConcatPath() {
String[] arr1 = new String[2];
arr1[0] = "root";
arr1[1] = "sg1";
PartialPath a = new PartialPath(arr1);
String[] arr2 = new String[2];
arr2[0] = "d1";
arr2[1] = "s1";
PartialPath b = new PartialPath(arr2);
Assert.assertEquals("[root, sg1, d1, s1]", Arrays.toString(a.concatPath(b).getNodes()));
Assert.assertEquals("s1", b.getTailNode());
Assert.assertEquals("root.sg1.d1", a.concatAsMeasurementPath(b).getDevicePath().getFullPath());
Assert.assertEquals("root.sg1", a.toString());
}
@Test
public void testConcatArray() throws IllegalPathException {
PartialPath a = new PartialPath("root", "sg1");
String[] arr2 = new String[2];
arr2[0] = "d1";
arr2[1] = "s1";
a.concatPath(arr2);
Assert.assertEquals("[root, sg1, d1, s1]", Arrays.toString(a.getNodes()));
}
@Test
public void testConcatNode() {
String[] arr1 = new String[2];
arr1[0] = "root";
arr1[1] = "sg1";
PartialPath a = new PartialPath(arr1);
PartialPath b = a.concatNode("d1");
Assert.assertEquals("[root, sg1, d1]", Arrays.toString(b.getNodes()));
Assert.assertEquals("root.sg1.d1", b.getFullPath());
Assert.assertTrue(b.startsWithOrPrefixOf(arr1));
Assert.assertEquals("root", b.getFirstNode());
}
@Test
public void testAlterPrefixPath() throws IllegalPathException {
// Plain path.
PartialPath p = new PartialPath("root.a.b.c");
List<PartialPath> results = p.alterPrefixPath(new PartialPath("root.a.b"));
Assert.assertEquals(results.toString(), 1, results.size());
Assert.assertEquals("root.a.b.c", results.get(0).getFullPath());
// Path with single level wildcard.
p = new PartialPath("root.*.b.c");
results = p.alterPrefixPath(new PartialPath("root.a.b"));
Assert.assertEquals(results.toString(), 1, results.size());
Assert.assertEquals("root.a.b.c", results.get(0).getFullPath());
// Path with multi level wildcard.
p = new PartialPath("root.**.b.c");
results = p.alterPrefixPath(new PartialPath("root.a.b"));
Assert.assertEquals(results.toString(), 3, results.size());
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.a.b.c")));
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.a.b.b.c")));
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.a.b.**.b.c")));
p = new PartialPath("root.**");
results = p.alterPrefixPath(new PartialPath("root.a.b"));
Assert.assertEquals(results.toString(), 2, results.size());
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.a.b")));
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.a.b.**")));
p = new PartialPath("root.**.b.**");
results = p.alterPrefixPath(new PartialPath("root.a.b.c"));
Assert.assertEquals(results.toString(), 2, results.size());
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.a.b.c")));
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.a.b.c.**")));
p = new PartialPath("root.**.b.**.b");
results = p.alterPrefixPath(new PartialPath("root.b.b.b"));
Assert.assertEquals(results.toString(), 2, results.size());
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.b.b.b.b")));
Assert.assertTrue(results.toString(), results.contains(new PartialPath("root.b.b.b.**.b")));
// Path cannot be altered.
p = new PartialPath("root.b.c.**");
results = p.alterPrefixPath(new PartialPath("root.a.b.c"));
Assert.assertEquals(results.toString(), 0, results.size());
}
@Test
public void testMatchFullPath() throws IllegalPathException {
PartialPath p1 = new PartialPath("root.sg1.d1.*");
Assert.assertTrue(p1.matchFullPath(new PartialPath("root.sg1.d1.s2")));
Assert.assertFalse(p1.matchFullPath(new PartialPath("root.sg1.d1")));
Assert.assertFalse(p1.matchFullPath(new PartialPath("root.sg2.d1.*")));
Assert.assertFalse(p1.matchFullPath(new PartialPath("", false)));
PartialPath path = new PartialPath("root.sg1.d1.s1");
String[] patterns1 = {
"root.sg1.d1.s1",
"root.sg1.*.s1",
"root.*.d1.*",
"root.*.*.*",
"root.s*.d1.s1",
"root.*g1.d1.s1",
"root.s*.d1.*",
"root.s*.d*.s*",
"root.**",
"root.**.s1",
"root.sg1.**",
};
for (String pattern : patterns1) {
Assert.assertTrue(new PartialPath(pattern).matchFullPath(path));
}
String[] patterns2 = {
"root2.sg1.d1.s1",
"root.sg1.*.s2",
"root.*.d2.s1",
"root.*.d*.s2",
"root.*.a*.s1",
"root.*",
"root.*.*",
"root.s*.d*.a*",
"root2.**",
"root.**.s2",
"root.**.d1",
"root.sg2.**",
};
for (String pattern : patterns2) {
Assert.assertFalse(new PartialPath(pattern).matchFullPath(path));
}
}
@Test
public void testMatchPrefixPath() throws IllegalPathException {
// ===
PartialPath pattern1 = new PartialPath("root.sg1.d1.*");
Assert.assertTrue(pattern1.matchPrefixPath(new PartialPath("", false)));
String[] prefixPathList11 = {"root.sg1.d1.s1", "root.sg1.d1", "root.sg1", "root"};
for (String prefixPath : prefixPathList11) {
Assert.assertTrue(pattern1.matchPrefixPath(new PartialPath(prefixPath)));
}
String[] prefixPathList12 = {
"root2.sg1.d1.s1",
"root.sg2.d1.s1",
"root.sg1.d2.s1",
"root.sg1.d2",
"root.sg2.d1",
"root.sg2",
"root2",
"root.sg1.d1.s1.o1"
};
for (String prefixPath : prefixPathList12) {
Assert.assertFalse(pattern1.matchPrefixPath(new PartialPath(prefixPath)));
}
// ===
PartialPath pattern2 = new PartialPath("root.*.d1.*");
for (String prefixPath : prefixPathList11) {
Assert.assertTrue(pattern2.matchPrefixPath(new PartialPath(prefixPath)));
}
String[] prefixPathList22 = {
"root2.sg1.d1.s1", "root.sg1.d2.s1", "root.sg1.d2", "root2.sg2", "root2"
};
for (String prefixPath : prefixPathList22) {
Assert.assertFalse(pattern2.matchPrefixPath(new PartialPath(prefixPath)));
}
// ==
PartialPath pattern3 = new PartialPath("root.sg1.*.*");
for (String prefixPath : prefixPathList11) {
Assert.assertTrue(pattern3.matchPrefixPath(new PartialPath(prefixPath)));
}
String[] prefixPathList32 = {
"root2.sg1.d1.s1", "root.sg2.d2.s1", "root.sg1.d1.s1.o1", "root.sg2", "root2"
};
for (String prefixPath : prefixPathList32) {
Assert.assertFalse(pattern3.matchPrefixPath(new PartialPath(prefixPath)));
}
// ==
PartialPath pattern4 = new PartialPath("root.**");
for (String prefixPath : prefixPathList11) {
Assert.assertTrue(pattern4.matchPrefixPath(new PartialPath(prefixPath)));
}
String[] prefixPathList42 = {"root2.sg1.d1.s1"};
for (String prefixPath : prefixPathList42) {
Assert.assertFalse(pattern4.matchPrefixPath(new PartialPath(prefixPath)));
}
// ==
PartialPath pattern5 = new PartialPath("root.**.d1");
String[] prefixPathList51 = {
"root.sg1.d1.s1", "root.sg1.d1", "root.sg1", "root", "root.sg1.d1.s1"
};
for (String prefixPath : prefixPathList51) {
Assert.assertTrue(pattern5.matchPrefixPath(new PartialPath(prefixPath)));
}
String[] prefixPathList52 = {"root2.sg1.d1.s1"};
for (String prefixPath : prefixPathList52) {
Assert.assertFalse(pattern5.matchPrefixPath(new PartialPath(prefixPath)));
}
// ==
PartialPath pattern6 = new PartialPath("root.sg1.**");
for (String prefixPath : prefixPathList11) {
Assert.assertTrue(pattern6.matchPrefixPath(new PartialPath(prefixPath)));
}
String[] prefixPathList62 = {"root2.sg1.d1.s1", "root.sg2.d1.s1"};
for (String prefixPath : prefixPathList62) {
Assert.assertFalse(pattern6.matchPrefixPath(new PartialPath(prefixPath)));
}
// ==
PartialPath pattern7 = new PartialPath("root.**.d1.**");
String[] prefixPathList71 = {
"root.sg1.d1.s1", "root.sg1.d1", "root.sg1", "root", "root.sg1.d2.s1"
};
for (String prefixPath : prefixPathList71) {
Assert.assertTrue(pattern7.matchPrefixPath(new PartialPath(prefixPath)));
}
String[] prefixPathList72 = {"root2.sg1.d1.s1"};
for (String prefixPath : prefixPathList72) {
Assert.assertFalse(pattern7.matchPrefixPath(new PartialPath(prefixPath)));
}
// ==
PartialPath pattern8 = new PartialPath("root.**.d1.*");
for (String prefixPath : prefixPathList71) {
Assert.assertTrue(pattern8.matchPrefixPath(new PartialPath(prefixPath)));
}
for (String prefixPath : prefixPathList72) {
Assert.assertFalse(pattern7.matchPrefixPath(new PartialPath(prefixPath)));
}
}
@Test
public void testPartialPathAndStringList() {
List<PartialPath> paths =
PartialPath.fromStringList(Arrays.asList("root.sg1.d1.s1", "root.sg1.d1.s2"));
Assert.assertEquals("root.sg1.d1.s1", paths.get(0).getFullPath());
Assert.assertEquals("root.sg1.d1.s2", paths.get(1).getFullPath());
List<String> stringPaths = PartialPath.toStringList(paths);
Assert.assertEquals("root.sg1.d1.s1", stringPaths.get(0));
Assert.assertEquals("root.sg1.d1.s2", stringPaths.get(1));
}
  @Test
  public void testOverlapWith() throws IllegalPathException {
    // Each pathPairs[i] is checked as pathPairs[i][0].overlapWith(pathPairs[i][1]). The expected
    // outcome lives at the SAME INDEX in `results` below (the two arrays are positionally
    // coupled) and is repeated as a trailing comment on each pair for readability.
    PartialPath[][] pathPairs =
        new PartialPath[][] {
          new PartialPath[] {new PartialPath("root.**"), new PartialPath("root.sg.**")}, // true
          new PartialPath[] {new PartialPath("root.**.*"), new PartialPath("root.sg.**")}, // true
          new PartialPath[] {new PartialPath("root.**.s"), new PartialPath("root.sg.**")}, // true
          new PartialPath[] {new PartialPath("root.*.**"), new PartialPath("root.sg.**")}, // true
          new PartialPath[] {new PartialPath("root.*.d.s"), new PartialPath("root.**.s")}, // true
          new PartialPath[] {new PartialPath("root.*.d.s"), new PartialPath("root.sg.*.s")}, // true
          new PartialPath[] {new PartialPath("root.*.d.s"), new PartialPath("root.sg.d2.s")}, // false
          new PartialPath[] {new PartialPath("root.*.d.s.*"), new PartialPath("root.sg.d.s")}, // false
          new PartialPath[] {new PartialPath("root.**.d.s"), new PartialPath("root.**.d2.s")}, // false
          new PartialPath[] {new PartialPath("root.**.*.s"), new PartialPath("root.**.d2.s")}, // true
          new PartialPath[] {new PartialPath("root.**.d1.*"), new PartialPath("root.*")}, // false
          new PartialPath[] {new PartialPath("root.**.d1.*"), new PartialPath("root.d2.*.s")}, // true
          new PartialPath[] {new PartialPath("root.**.d1.**"), new PartialPath("root.d2.**")}, // true
          new PartialPath[] { // true
            new PartialPath("root.**.*.**.**"), new PartialPath("root.d2.*.s1.**")
          },
          new PartialPath[] {new PartialPath("root.**.s1.d1"), new PartialPath("root.s1.d1.**")}, // true
          new PartialPath[] {new PartialPath("root.**.s1"), new PartialPath("root.**.s2.s1")}, // true
          new PartialPath[] { // true
            new PartialPath("root.**.s1.s2.**"), new PartialPath("root.d1.s1.s2.*")
          },
          new PartialPath[] {new PartialPath("root.**.s1"), new PartialPath("root.**.s2")}, // false
        };
    // Expected results, index-aligned with pathPairs above (18 entries). When adding a pair,
    // insert its expected value at the matching position here.
    boolean[] results =
        new boolean[] {
          true, true, true, true, true, true, false, false, false, true, false, true, true, true,
          true, true, true, false
        };
    Assert.assertEquals(pathPairs.length, results.length);
    for (int i = 0; i < pathPairs.length; i++) {
      Assert.assertEquals(results[i], pathPairs[i][0].overlapWith(pathPairs[i][1]));
    }
  }
  @Test
  public void testInclude() throws IllegalPathException {
    // Each pathPairs[i] is checked as pathPairs[i][0].include(pathPairs[i][1]). The expected
    // outcome lives at the SAME INDEX in `results` below (the two arrays are positionally
    // coupled) and is repeated as a trailing comment on each pair for readability.
    PartialPath[][] pathPairs =
        new PartialPath[][] {
          new PartialPath[] {new PartialPath("root.**"), new PartialPath("root.sg.**")}, // true
          new PartialPath[] {new PartialPath("root.**.*"), new PartialPath("root.**")}, // false
          new PartialPath[] {new PartialPath("root.**.*"), new PartialPath("root.sg.**")}, // true
          new PartialPath[] {new PartialPath("root.**.s"), new PartialPath("root.sg.**")}, // false
          new PartialPath[] {new PartialPath("root.*.**"), new PartialPath("root.sg.**")}, // true
          new PartialPath[] {new PartialPath("root.*.d.s"), new PartialPath("root.sg1.d.s")}, // true
          new PartialPath[] {new PartialPath("root.**.s"), new PartialPath("root.*.d.s")}, // true
          new PartialPath[] {new PartialPath("root.*.d.s"), new PartialPath("root.**.s")}, // false
          new PartialPath[] {new PartialPath("root.*.d.s"), new PartialPath("root.sg.*.s")}, // false
          new PartialPath[] {new PartialPath("root.*.d.s"), new PartialPath("root.sg.d2.s")}, // false
          new PartialPath[] {new PartialPath("root.*.d.s.*"), new PartialPath("root.sg.d.s")}, // false
          new PartialPath[] {new PartialPath("root.**.d.s"), new PartialPath("root.**.d2.s")}, // false
          new PartialPath[] {new PartialPath("root.**.*.s"), new PartialPath("root.**.d2.s")}, // true
          new PartialPath[] {new PartialPath("root.**.d1.*"), new PartialPath("root.*")}, // false
          new PartialPath[] {new PartialPath("root.**.d1.*"), new PartialPath("root.d2.*.s")}, // false
          new PartialPath[] {new PartialPath("root.**.d1.**"), new PartialPath("root.d2.**")}, // false
          new PartialPath[] { // true
            new PartialPath("root.**.*.**.**"), new PartialPath("root.d2.*.s1.**")
          },
          new PartialPath[] {new PartialPath("root.**.s1.d1"), new PartialPath("root.s1.d1.**")}, // false
          new PartialPath[] {new PartialPath("root.**.s1"), new PartialPath("root.**.s2.s1")}, // true
          new PartialPath[] { // true
            new PartialPath("root.**.s1.s2.**"), new PartialPath("root.d1.s1.s2.*")
          },
          new PartialPath[] {new PartialPath("root.**.s1"), new PartialPath("root.**.s2")}, // false
          new PartialPath[] {new PartialPath("root.*.*.**"), new PartialPath("root.**.*")}, // false
          new PartialPath[] {new PartialPath("root.**.**"), new PartialPath("root.*.**.**.*")}, // true
        };
    // Expected results, index-aligned with pathPairs above (23 entries). When adding a pair,
    // insert its expected value at the matching position here.
    boolean[] results =
        new boolean[] {
          true, false, true, false, true, true, true, false, false, false, false, false, true,
          false, false, false, true, false, true, true, false, false, true
        };
    Assert.assertEquals(pathPairs.length, results.length);
    for (int i = 0; i < pathPairs.length; i++) {
      Assert.assertEquals(results[i], pathPairs[i][0].include(pathPairs[i][1]));
    }
  }
@Test
public void testIntersectWithPrefixPattern() throws Exception {
checkIntersect(
new PartialPath("root.**.d*"),
new PartialPath("root.test.dac.device1.**"),
new HashSet<PartialPath>() {
{
add(new PartialPath("root.test.dac.device1.d*"));
add(new PartialPath("root.test.dac.device1.**.d*"));
}
});
checkIntersect(
new PartialPath("root.**.d*.**"),
new PartialPath("root.test.dac.device1.**"),
new HashSet<PartialPath>() {
{
add(new PartialPath("root.test.dac.device1.**"));
}
});
checkIntersect(
new PartialPath("root.**.d1.**"),
new PartialPath("root.sg1.d1.**"),
new HashSet<PartialPath>() {
{
add(new PartialPath("root.sg1.d1.**"));
}
});
checkIntersect(
new PartialPath("root.**.d1.s1"),
new PartialPath("root.sg1.d1.**"),
new HashSet<PartialPath>() {
{
add(new PartialPath("root.sg1.d1.s1"));
add(new PartialPath("root.sg1.d1.d1.s1"));
add(new PartialPath("root.sg1.d1.**.d1.s1"));
}
});
checkIntersect(
new PartialPath("root.**.d*"),
new PartialPath("root.sg1.d1.**"),
new HashSet<PartialPath>() {
{
add(new PartialPath("root.sg1.d1.d*"));
add(new PartialPath("root.sg1.d1.**.d*"));
}
});
checkIntersect(
new PartialPath("root.sg1.d1"), new PartialPath("root.sg1.d1.**"), Collections.emptySet());
checkIntersect(
new PartialPath("root.sg1.d2.s1"),
new PartialPath("root.sg1.d1.**"),
Collections.emptySet());
checkIntersect(
new PartialPath("root.*.d.s1"),
new PartialPath("root.db.d.**"),
new HashSet<PartialPath>() {
{
add(new PartialPath("root.db.d.s1"));
}
});
}
@Test
public void testToDeviceId() {
PartialPath partialPath = new PartialPath(new String[] {"root"});
IDeviceID deviceID = partialPath.getIDeviceID();
assertEquals(1, deviceID.segmentNum());
assertEquals("root", deviceID.segment(0));
assertEquals("root", deviceID.getTableName());
partialPath = new PartialPath(new String[] {"root", "a"});
deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root", deviceID.segment(0));
assertEquals("a", deviceID.segment(1));
assertEquals("root", deviceID.getTableName());
partialPath = new PartialPath(new String[] {"root", "a", "b"});
deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root.a", deviceID.segment(0));
assertEquals("b", deviceID.segment(1));
assertEquals("root.a", deviceID.getTableName());
partialPath = new PartialPath(new String[] {"root", "a", "b", "c"});
deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root.a.b", deviceID.segment(0));
assertEquals("c", deviceID.segment(1));
assertEquals("root.a.b", deviceID.getTableName());
partialPath = new PartialPath(new String[] {"root", "a", "b", "c", "d"});
deviceID = partialPath.getIDeviceID();
assertEquals(3, deviceID.segmentNum());
assertEquals("root.a.b", deviceID.segment(0));
assertEquals("c", deviceID.segment(1));
assertEquals("d", deviceID.segment(2));
assertEquals("root.a.b", deviceID.getTableName());
}
@Test
public void testAlignedToDeviceId() throws IllegalPathException {
PartialPath partialPath = new AlignedPath("root.a", Collections.singletonList("s1"));
IDeviceID deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root", deviceID.segment(0));
assertEquals("a", deviceID.segment(1));
assertEquals("root", deviceID.getTableName());
partialPath = new AlignedPath("root.a.b", Collections.singletonList("s1"));
deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root.a", deviceID.segment(0));
assertEquals("b", deviceID.segment(1));
assertEquals("root.a", deviceID.getTableName());
partialPath = new AlignedPath("root.a.b.c", Collections.singletonList("s1"));
deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root.a.b", deviceID.segment(0));
assertEquals("c", deviceID.segment(1));
assertEquals("root.a.b", deviceID.getTableName());
partialPath = new AlignedPath("root.a.b.c.d", Collections.singletonList("s1"));
deviceID = partialPath.getIDeviceID();
assertEquals(3, deviceID.segmentNum());
assertEquals("root.a.b", deviceID.segment(0));
assertEquals("c", deviceID.segment(1));
assertEquals("d", deviceID.segment(2));
assertEquals("root.a.b", deviceID.getTableName());
}
@Test
public void testMeasurementPathToDeviceId() throws IllegalPathException {
PartialPath partialPath = new MeasurementPath("root.a.s1");
IDeviceID deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root", deviceID.segment(0));
assertEquals("a", deviceID.segment(1));
assertEquals("root", deviceID.getTableName());
partialPath = new MeasurementPath("root.a.b.s1");
deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root.a", deviceID.segment(0));
assertEquals("b", deviceID.segment(1));
assertEquals("root.a", deviceID.getTableName());
partialPath = new MeasurementPath("root.a.b.c.s1");
deviceID = partialPath.getIDeviceID();
assertEquals(2, deviceID.segmentNum());
assertEquals("root.a.b", deviceID.segment(0));
assertEquals("c", deviceID.segment(1));
assertEquals("root.a.b", deviceID.getTableName());
partialPath = new MeasurementPath("root.a.b.c.d.s1");
deviceID = partialPath.getIDeviceID();
assertEquals(3, deviceID.segmentNum());
assertEquals("root.a.b", deviceID.segment(0));
assertEquals("c", deviceID.segment(1));
assertEquals("d", deviceID.segment(2));
assertEquals("root.a.b", deviceID.getTableName());
}
  @Test
  public void testSerialization() throws IllegalPathException, IOException {
    // Each of the three path types is round-tripped twice: once through the ByteBuffer-returning
    // serialize() and once through serialize(OutputStream); both must deserialize to an equal
    // object. Note `buffer` is reused across sections, so the statement order matters.

    // PartialPath round trip.
    PartialPath partialPath = new PartialPath("root.a.b.c.d.s1");
    ByteBuffer buffer = partialPath.serialize();
    PartialPath deserialized = (PartialPath) PathDeserializeUtil.deserialize(buffer);
    assertEquals(partialPath, deserialized);
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
      partialPath.serialize(baos);
      buffer = ByteBuffer.wrap(baos.toByteArray());
      deserialized = (PartialPath) PathDeserializeUtil.deserialize(buffer);
      assertEquals(partialPath, deserialized);
    }

    // MeasurementPath round trip with alias, schema and tag map populated.
    // NOTE(review): assumes alias/schema/tags participate in MeasurementPath.equals — confirm.
    MeasurementPath measurementPath = new MeasurementPath("root.a.b.c.d.s1");
    measurementPath.setMeasurementAlias("ss1");
    measurementPath.setMeasurementSchema(new MeasurementSchema("s1", TSDataType.DOUBLE));
    measurementPath.setTagMap(Collections.singletonMap("tag1", "tagValue1"));
    buffer = measurementPath.serialize();
    MeasurementPath deserializedMeasurementPath =
        (MeasurementPath) PathDeserializeUtil.deserialize(buffer);
    assertEquals(measurementPath, deserializedMeasurementPath);
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
      measurementPath.serialize(baos);
      buffer = ByteBuffer.wrap(baos.toByteArray());
      deserializedMeasurementPath = (MeasurementPath) PathDeserializeUtil.deserialize(buffer);
      assertEquals(measurementPath, deserializedMeasurementPath);
    }

    // AlignedPath round trip with two sub-measurements and their schemas.
    AlignedPath alignedPath = new AlignedPath("root.a.b.c.d");
    alignedPath.setMeasurementList(Arrays.asList("s1", "s2"));
    alignedPath.setSchemaList(
        Arrays.asList(
            new MeasurementSchema("s1", TSDataType.DOUBLE),
            new MeasurementSchema("s2", TSDataType.TEXT)));
    buffer = alignedPath.serialize();
    AlignedPath deserializedAlignedPath = (AlignedPath) PathDeserializeUtil.deserialize(buffer);
    assertEquals(alignedPath, deserializedAlignedPath);
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
      alignedPath.serialize(baos);
      buffer = ByteBuffer.wrap(baos.toByteArray());
      deserializedAlignedPath = (AlignedPath) PathDeserializeUtil.deserialize(buffer);
      assertEquals(alignedPath, deserializedAlignedPath);
    }
  }
private void checkIntersect(PartialPath pattern, PartialPath prefix, Set<PartialPath> expected) {
List<PartialPath> actual = pattern.intersectWithPrefixPattern(prefix);
for (PartialPath path : actual) {
if (!expected.contains(path)) {
System.out.println(path);
}
Assert.assertTrue(expected.remove(path));
}
Assert.assertTrue(expected.isEmpty());
}
private void checkNodes(String[] expected, String[] actual) {
Assert.assertEquals(expected.length, actual.length);
for (int i = 0; i < expected.length; i++) {
Assert.assertEquals(expected[i], actual[i]);
}
}
}
|
googleapis/google-cloud-java | 36,062 | java-run/proto-google-cloud-run-v2/src/main/java/com/google/cloud/run/v2/BinaryAuthorization.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/run/v2/vendor_settings.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.run.v2;
/**
*
*
* <pre>
* Settings for Binary Authorization feature.
* </pre>
*
* Protobuf type {@code google.cloud.run.v2.BinaryAuthorization}
*/
public final class BinaryAuthorization extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.run.v2.BinaryAuthorization)
BinaryAuthorizationOrBuilder {
private static final long serialVersionUID = 0L;
// Use BinaryAuthorization.newBuilder() to construct.
private BinaryAuthorization(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BinaryAuthorization() {
breakglassJustification_ = "";
}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to create fresh instances.
    return new BinaryAuthorization();
  }

  /** Returns the descriptor for the {@code google.cloud.run.v2.BinaryAuthorization} message. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.run.v2.VendorSettingsProto
        .internal_static_google_cloud_run_v2_BinaryAuthorization_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds this message class and its Builder to the generated field accessor table.
    return com.google.cloud.run.v2.VendorSettingsProto
        .internal_static_google_cloud_run_v2_BinaryAuthorization_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.run.v2.BinaryAuthorization.class,
            com.google.cloud.run.v2.BinaryAuthorization.Builder.class);
  }
private int binauthzMethodCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object binauthzMethod_;
  /**
   * Discriminator for the {@code binauthz_method} oneof: at most one of {@code use_default}
   * (field 1) or {@code policy} (field 3) is set at a time.
   */
  public enum BinauthzMethodCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    USE_DEFAULT(1),
    POLICY(3),
    BINAUTHZMETHOD_NOT_SET(0);
    // Field number of the oneof member this constant represents (0 = nothing set).
    private final int value;

    private BinauthzMethodCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static BinauthzMethodCase valueOf(int value) {
      return forNumber(value);
    }

    /** Maps a oneof field number to its case; returns {@code null} for unknown numbers. */
    public static BinauthzMethodCase forNumber(int value) {
      switch (value) {
        case 1:
          return USE_DEFAULT;
        case 3:
          return POLICY;
        case 0:
          return BINAUTHZMETHOD_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  /** Returns which member of the {@code binauthz_method} oneof is currently set. */
  public BinauthzMethodCase getBinauthzMethodCase() {
    return BinauthzMethodCase.forNumber(binauthzMethodCase_);
  }
public static final int USE_DEFAULT_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* Optional. If True, indicates to use the default project's binary
* authorization policy. If False, binary authorization will be disabled.
* </pre>
*
* <code>bool use_default = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the useDefault field is set.
*/
  @java.lang.Override
  public boolean hasUseDefault() {
    // use_default is "set" iff it is the active member of the binauthz_method oneof (field 1).
    return binauthzMethodCase_ == 1;
  }

  /**
   *
   *
   * <pre>
   * Optional. If True, indicates to use the default project's binary
   * authorization policy. If False, binary authorization will be disabled.
   * </pre>
   *
   * <code>bool use_default = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The useDefault.
   */
  @java.lang.Override
  public boolean getUseDefault() {
    if (binauthzMethodCase_ == 1) {
      return (java.lang.Boolean) binauthzMethod_;
    }
    // Default when the oneof holds another member or nothing.
    return false;
  }
public static final int POLICY_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* Optional. The path to a binary authorization policy.
* Format: `projects/{project}/platforms/cloudRun/{policy-name}`
* </pre>
*
* <code>
* string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return Whether the policy field is set.
*/
  public boolean hasPolicy() {
    // policy is "set" iff it is the active member of the binauthz_method oneof (field 3).
    return binauthzMethodCase_ == 3;
  }

  /**
   *
   *
   * <pre>
   * Optional. The path to a binary authorization policy.
   * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
   * </pre>
   *
   * <code>
   * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The policy.
   */
  public java.lang.String getPolicy() {
    java.lang.Object ref = "";
    if (binauthzMethodCase_ == 3) {
      ref = binauthzMethod_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 ByteString and cache the String back into the oneof slot.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (binauthzMethodCase_ == 3) {
        binauthzMethod_ = s;
      }
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. The path to a binary authorization policy.
   * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
   * </pre>
   *
   * <code>
   * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for policy.
   */
  public com.google.protobuf.ByteString getPolicyBytes() {
    java.lang.Object ref = "";
    if (binauthzMethodCase_ == 3) {
      ref = binauthzMethod_;
    }
    if (ref instanceof java.lang.String) {
      // Encode the cached String and store the ByteString form back into the oneof slot.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (binauthzMethodCase_ == 3) {
        binauthzMethod_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int BREAKGLASS_JUSTIFICATION_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object breakglassJustification_ = "";
/**
*
*
* <pre>
* Optional. If present, indicates to use Breakglass using this justification.
* If use_default is False, then it must be empty.
* For more information on breakglass, see
* https://cloud.google.com/binary-authorization/docs/using-breakglass
* </pre>
*
* <code>string breakglass_justification = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The breakglassJustification.
*/
  @java.lang.Override
  public java.lang.String getBreakglassJustification() {
    java.lang.Object ref = breakglassJustification_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 ByteString and cache the decoded String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      breakglassJustification_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. If present, indicates to use Breakglass using this justification.
   * If use_default is False, then it must be empty.
   * For more information on breakglass, see
   * https://cloud.google.com/binary-authorization/docs/using-breakglass
   * </pre>
   *
   * <code>string breakglass_justification = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for breakglassJustification.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getBreakglassJustificationBytes() {
    java.lang.Object ref = breakglassJustification_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String and keep the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      breakglassJustification_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized tri-state: 1 = initialized, 0 = not initialized, -1 = not yet computed.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are emitted in field-number order: 1 (use_default), 2 (breakglass_justification),
    // 3 (policy). Oneof members are written only when active; the plain string only if non-empty.
    if (binauthzMethodCase_ == 1) {
      output.writeBool(1, (boolean) ((java.lang.Boolean) binauthzMethod_));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(breakglassJustification_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, breakglassJustification_);
    }
    if (binauthzMethodCase_ == 3) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, binauthzMethod_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // -1 means "not yet computed"

    // Must mirror writeTo(): account for exactly the fields writeTo() emits.
    size = 0;
    if (binauthzMethodCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBoolSize(
              1, (boolean) ((java.lang.Boolean) binauthzMethod_));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(breakglassJustification_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, breakglassJustification_);
    }
    if (binauthzMethodCase_ == 3) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, binauthzMethod_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.run.v2.BinaryAuthorization)) {
      return super.equals(obj);
    }
    com.google.cloud.run.v2.BinaryAuthorization other =
        (com.google.cloud.run.v2.BinaryAuthorization) obj;

    if (!getBreakglassJustification().equals(other.getBreakglassJustification())) return false;
    // Oneof: the active cases must match, then only the active member is compared.
    if (!getBinauthzMethodCase().equals(other.getBinauthzMethodCase())) return false;
    switch (binauthzMethodCase_) {
      case 1:
        if (getUseDefault() != other.getUseDefault()) return false;
        break;
      case 3:
        if (!getPolicy().equals(other.getPolicy())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + BREAKGLASS_JUSTIFICATION_FIELD_NUMBER;
hash = (53 * hash) + getBreakglassJustification().hashCode();
switch (binauthzMethodCase_) {
case 1:
hash = (37 * hash) + USE_DEFAULT_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getUseDefault());
break;
case 3:
hash = (37 * hash) + POLICY_FIELD_NUMBER;
hash = (53 * hash) + getPolicy().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parseFrom(...) overloads. All delegate to PARSER (byte
  // sources) or to GeneratedMessageV3 stream helpers (IO sources); the
  // *delimitedFrom variants read a varint length prefix first.
  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.run.v2.BinaryAuthorization parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. newBuilder() starts from the (all-defaults)
  // DEFAULT_INSTANCE; newBuilder(prototype) pre-populates from an existing
  // message; toBuilder() avoids a copy when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.run.v2.BinaryAuthorization prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Settings for Binary Authorization feature.
   * </pre>
   *
   * Protobuf type {@code google.cloud.run.v2.BinaryAuthorization}
   */
  // NOTE(review): protoc-generated mutable builder. Oneof state lives in
  // binauthzMethodCase_ (1 = use_default, 3 = policy, 0 = unset) plus the
  // untyped binauthzMethod_ payload; bitField0_ bit 0x4 tracks whether
  // breakglass_justification was explicitly set.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.run.v2.BinaryAuthorization)
      com.google.cloud.run.v2.BinaryAuthorizationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.run.v2.VendorSettingsProto
          .internal_static_google_cloud_run_v2_BinaryAuthorization_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.run.v2.VendorSettingsProto
          .internal_static_google_cloud_run_v2_BinaryAuthorization_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.run.v2.BinaryAuthorization.class,
              com.google.cloud.run.v2.BinaryAuthorization.Builder.class);
    }

    // Construct using com.google.cloud.run.v2.BinaryAuthorization.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field (and the oneof case) to its default.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      breakglassJustification_ = "";
      binauthzMethodCase_ = 0;
      binauthzMethod_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.run.v2.VendorSettingsProto
          .internal_static_google_cloud_run_v2_BinaryAuthorization_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.run.v2.BinaryAuthorization getDefaultInstanceForType() {
      return com.google.cloud.run.v2.BinaryAuthorization.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.run.v2.BinaryAuthorization build() {
      com.google.cloud.run.v2.BinaryAuthorization result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.run.v2.BinaryAuthorization buildPartial() {
      com.google.cloud.run.v2.BinaryAuthorization result =
          new com.google.cloud.run.v2.BinaryAuthorization(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Copies non-oneof fields whose "has been set" bit is on.
    private void buildPartial0(com.google.cloud.run.v2.BinaryAuthorization result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.breakglassJustification_ = breakglassJustification_;
      }
    }

    // Copies the oneof case tag and its payload verbatim.
    private void buildPartialOneofs(com.google.cloud.run.v2.BinaryAuthorization result) {
      result.binauthzMethodCase_ = binauthzMethodCase_;
      result.binauthzMethod_ = this.binauthzMethod_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.run.v2.BinaryAuthorization) {
        return mergeFrom((com.google.cloud.run.v2.BinaryAuthorization) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: set fields of `other` overwrite this builder's state.
    public Builder mergeFrom(com.google.cloud.run.v2.BinaryAuthorization other) {
      if (other == com.google.cloud.run.v2.BinaryAuthorization.getDefaultInstance()) return this;
      if (!other.getBreakglassJustification().isEmpty()) {
        breakglassJustification_ = other.breakglassJustification_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      switch (other.getBinauthzMethodCase()) {
        case USE_DEFAULT:
          {
            setUseDefault(other.getUseDefault());
            break;
          }
        case POLICY:
          {
            binauthzMethodCase_ = 3;
            binauthzMethod_ = other.binauthzMethod_;
            onChanged();
            break;
          }
        case BINAUTHZMETHOD_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tags: 8 = field 1 varint (use_default),
    // 18 = field 2 length-delimited (breakglass_justification),
    // 26 = field 3 length-delimited (policy).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                binauthzMethod_ = input.readBool();
                binauthzMethodCase_ = 1;
                break;
              } // case 8
            case 18:
              {
                breakglassJustification_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 18
            case 26:
              {
                java.lang.String s = input.readStringRequireUtf8();
                binauthzMethodCase_ = 3;
                binauthzMethod_ = s;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int binauthzMethodCase_ = 0;
    private java.lang.Object binauthzMethod_;

    public BinauthzMethodCase getBinauthzMethodCase() {
      return BinauthzMethodCase.forNumber(binauthzMethodCase_);
    }

    public Builder clearBinauthzMethod() {
      binauthzMethodCase_ = 0;
      binauthzMethod_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    /**
     *
     *
     * <pre>
     * Optional. If True, indicates to use the default project's binary
     * authorization policy. If False, binary authorization will be disabled.
     * </pre>
     *
     * <code>bool use_default = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return Whether the useDefault field is set.
     */
    public boolean hasUseDefault() {
      return binauthzMethodCase_ == 1;
    }

    /**
     *
     *
     * <pre>
     * Optional. If True, indicates to use the default project's binary
     * authorization policy. If False, binary authorization will be disabled.
     * </pre>
     *
     * <code>bool use_default = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The useDefault.
     */
    public boolean getUseDefault() {
      if (binauthzMethodCase_ == 1) {
        return (java.lang.Boolean) binauthzMethod_;
      }
      return false;
    }

    /**
     *
     *
     * <pre>
     * Optional. If True, indicates to use the default project's binary
     * authorization policy. If False, binary authorization will be disabled.
     * </pre>
     *
     * <code>bool use_default = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The useDefault to set.
     * @return This builder for chaining.
     */
    public Builder setUseDefault(boolean value) {

      binauthzMethodCase_ = 1;
      binauthzMethod_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. If True, indicates to use the default project's binary
     * authorization policy. If False, binary authorization will be disabled.
     * </pre>
     *
     * <code>bool use_default = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUseDefault() {
      if (binauthzMethodCase_ == 1) {
        binauthzMethodCase_ = 0;
        binauthzMethod_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The path to a binary authorization policy.
     * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
     * </pre>
     *
     * <code>
     * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return Whether the policy field is set.
     */
    @java.lang.Override
    public boolean hasPolicy() {
      return binauthzMethodCase_ == 3;
    }

    /**
     *
     *
     * <pre>
     * Optional. The path to a binary authorization policy.
     * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
     * </pre>
     *
     * <code>
     * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The policy.
     */
    @java.lang.Override
    public java.lang.String getPolicy() {
      java.lang.Object ref = "";
      if (binauthzMethodCase_ == 3) {
        ref = binauthzMethod_;
      }
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode a ByteString payload and cache the decoded String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (binauthzMethodCase_ == 3) {
          binauthzMethod_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The path to a binary authorization policy.
     * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
     * </pre>
     *
     * <code>
     * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for policy.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getPolicyBytes() {
      java.lang.Object ref = "";
      if (binauthzMethodCase_ == 3) {
        ref = binauthzMethod_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (binauthzMethodCase_ == 3) {
          binauthzMethod_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The path to a binary authorization policy.
     * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
     * </pre>
     *
     * <code>
     * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The policy to set.
     * @return This builder for chaining.
     */
    public Builder setPolicy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      binauthzMethodCase_ = 3;
      binauthzMethod_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The path to a binary authorization policy.
     * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
     * </pre>
     *
     * <code>
     * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPolicy() {
      if (binauthzMethodCase_ == 3) {
        binauthzMethodCase_ = 0;
        binauthzMethod_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The path to a binary authorization policy.
     * Format: `projects/{project}/platforms/cloudRun/{policy-name}`
     * </pre>
     *
     * <code>
     * string policy = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for policy to set.
     * @return This builder for chaining.
     */
    public Builder setPolicyBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      binauthzMethodCase_ = 3;
      binauthzMethod_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object breakglassJustification_ = "";
    /**
     *
     *
     * <pre>
     * Optional. If present, indicates to use Breakglass using this justification.
     * If use_default is False, then it must be empty.
     * For more information on breakglass, see
     * https://cloud.google.com/binary-authorization/docs/using-breakglass
     * </pre>
     *
     * <code>string breakglass_justification = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The breakglassJustification.
     */
    public java.lang.String getBreakglassJustification() {
      java.lang.Object ref = breakglassJustification_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        breakglassJustification_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. If present, indicates to use Breakglass using this justification.
     * If use_default is False, then it must be empty.
     * For more information on breakglass, see
     * https://cloud.google.com/binary-authorization/docs/using-breakglass
     * </pre>
     *
     * <code>string breakglass_justification = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for breakglassJustification.
     */
    public com.google.protobuf.ByteString getBreakglassJustificationBytes() {
      java.lang.Object ref = breakglassJustification_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        breakglassJustification_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. If present, indicates to use Breakglass using this justification.
     * If use_default is False, then it must be empty.
     * For more information on breakglass, see
     * https://cloud.google.com/binary-authorization/docs/using-breakglass
     * </pre>
     *
     * <code>string breakglass_justification = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The breakglassJustification to set.
     * @return This builder for chaining.
     */
    public Builder setBreakglassJustification(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      breakglassJustification_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. If present, indicates to use Breakglass using this justification.
     * If use_default is False, then it must be empty.
     * For more information on breakglass, see
     * https://cloud.google.com/binary-authorization/docs/using-breakglass
     * </pre>
     *
     * <code>string breakglass_justification = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBreakglassJustification() {
      breakglassJustification_ = getDefaultInstance().getBreakglassJustification();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. If present, indicates to use Breakglass using this justification.
     * If use_default is False, then it must be empty.
     * For more information on breakglass, see
     * https://cloud.google.com/binary-authorization/docs/using-breakglass
     * </pre>
     *
     * <code>string breakglass_justification = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for breakglassJustification to set.
     * @return This builder for chaining.
     */
    public Builder setBreakglassJustificationBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      breakglassJustification_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.run.v2.BinaryAuthorization)
  }
// @@protoc_insertion_point(class_scope:google.cloud.run.v2.BinaryAuthorization)
  // Singleton default instance (all fields at protobuf defaults) and the
  // message's Parser. parsePartialFrom wraps parse failures so callers can
  // retrieve the partially-built message via getUnfinishedMessage().
  private static final com.google.cloud.run.v2.BinaryAuthorization DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.run.v2.BinaryAuthorization();
  }

  public static com.google.cloud.run.v2.BinaryAuthorization getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<BinaryAuthorization> PARSER =
      new com.google.protobuf.AbstractParser<BinaryAuthorization>() {
        @java.lang.Override
        public BinaryAuthorization parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<BinaryAuthorization> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BinaryAuthorization> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.run.v2.BinaryAuthorization getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* ===== NOTE(review): everything below this line originates from a DIFFERENT
 * source file (apache/harmony: classlib/modules/awt/src/main/java/common/
 * org/apache/harmony/awt/gl/CommonGraphics2D.java) that appears to have been
 * accidentally concatenated onto the generated protobuf code above. It must
 * be split back out into its own compilation unit — a single Java file cannot
 * contain a second package declaration. ===== */
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Alexey A. Petrenko
*/
package org.apache.harmony.awt.gl;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Composite;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics2D;
import java.awt.GraphicsConfiguration;
import java.awt.Image;
import java.awt.Paint;
import java.awt.PaintContext;
import java.awt.Point;
import java.awt.Polygon;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.Toolkit;
import java.awt.font.FontRenderContext;
import java.awt.font.GlyphVector;
import java.awt.geom.AffineTransform;
import java.awt.geom.Arc2D;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Line2D;
import java.awt.geom.PathIterator;
import java.awt.geom.RoundRectangle2D;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
import java.awt.image.BufferedImageOp;
import java.awt.image.ImageObserver;
import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import java.awt.image.WritableRaster;
import java.awt.image.renderable.RenderableImage;
import java.text.AttributedCharacterIterator;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.harmony.awt.gl.font.FontManager;
import org.apache.harmony.awt.gl.font.fontlib.FLTextRenderer;
import org.apache.harmony.awt.gl.image.OffscreenImage;
import org.apache.harmony.awt.gl.render.Blitter;
import org.apache.harmony.awt.gl.render.JavaArcRasterizer;
import org.apache.harmony.awt.gl.render.JavaLineRasterizer;
import org.apache.harmony.awt.gl.render.JavaShapeRasterizer;
import org.apache.harmony.awt.gl.render.JavaTextRenderer;
import org.apache.harmony.awt.gl.render.NullBlitter;
/*
 * List of abstract methods to implement in subclasses
* Graphics.copyArea(int x, int y, int width, int height, int dx, int dy)
* Graphics.create()
* Graphics2D.getDeviceConfiguration()
* CommonGraphics2D.fillMultiRectAreaColor(MultiRectArea mra);
* CommonGraphics2D.fillMultiRectAreaPaint(MultiRectArea mra);
*/
/**
* CommonGraphics2D class is a super class for all system-dependent
* implementations. It implements major part of Graphics and Graphics2D
* abstract methods.
* <h2>CommonGraphics2D Class Internals</h2>
* <h3>Line and Shape Rasterizers</h3>
* <p>
* The CommonGraphics2D class splits all shapes into a set of rectangles
* to unify the drawing process for different operating systems and architectures.
* For this purpose Java 2D* uses the JavaShapeRasterizer and the JavaLineRasterizer
* classes from the org.apache.harmony.awt.gl.render package. The JavaShapeRasterizer
* class splits an object implementing a Shape interface into a set of rectangles and
* produces a MultiRectArea object. The JavaLineRasterizer class makes line drawing
* more accurate and processes lines with strokes, which are instances of the BasicStroke
* class.
* </p>
* <p>
* To port the shape drawing to another platform you just need to override
* rectangle-drawing methods. However, if your operating system has functions to draw
* particular shapes, you can optimize your subclass of the CommonGraphics2D class by
* using this functionality in overridden methods.
* </p>
* <h3>Blitters</h3>
* <p>
* Blitter classes draw images on the display or buffered images. All blitters inherit
* the org.apache.harmony.awt.gl.render.Blitter interface.
* </p>
* <p>Blitters are divided into:
* <ul>
* <li>Native blitters for simple types of images, which the underlying native library
* can draw.</li>
* <li>Java* blitters for those types of images, which the underlying native library
* cannot handle.</li>
* </ul></p>
* <p>
* DRL Java 2D* also uses blitters to fill the shapes and the user-defined subclasses
* of the java.awt.Paint class with paints, which the system does not support.
* </p>
*
*<h3>Text Renderers</h3>
*<p>
*Text renderers draw strings and glyph vectors. All text renderers are subclasses
*of the org.apache.harmony.awt.gl.TextRenderer class.
*</p>
*
*/
public abstract class CommonGraphics2D extends Graphics2D {
    // Immutable baseline rendering hints shared by every CommonGraphics2D
    // instance: default text anti-aliasing, shape anti-aliasing off, and
    // default stroke normalization.
    private static final Map<RenderingHints.Key, Object> DEFAULT_RENDERING_HINTS;
    static {
        final Map<RenderingHints.Key, Object> m = new HashMap<RenderingHints.Key, Object>();
        m.put(RenderingHints.KEY_TEXT_ANTIALIASING,
                RenderingHints.VALUE_TEXT_ANTIALIAS_DEFAULT);
        m.put(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_OFF);
        m.put(RenderingHints.KEY_STROKE_CONTROL,
                RenderingHints.VALUE_STROKE_DEFAULT);
        DEFAULT_RENDERING_HINTS = Collections.unmodifiableMap(m);
    }
protected Surface dstSurf = null;
protected Blitter blitter = NullBlitter.getInstance();
protected RenderingHints hints = new RenderingHints(DEFAULT_RENDERING_HINTS);
// Clipping things
protected MultiRectArea clip = null;
protected Paint paint = Color.WHITE;
protected Color fgColor = Color.WHITE;
protected Color bgColor = Color.BLACK;
protected Composite composite = AlphaComposite.SrcOver;
protected Stroke stroke = new BasicStroke();
//TODO: Think more about FontRenderContext
protected FontRenderContext frc = null;
protected JavaShapeRasterizer jsr = new JavaShapeRasterizer();
protected Font font = new Font("Dialog", Font.PLAIN, 12);; //$NON-NLS-1$
protected TextRenderer jtr =
FontManager.IS_FONTLIB ?
FLTextRenderer.getInstance() :
JavaTextRenderer.inst;
// Current graphics transform
protected AffineTransform transform = new AffineTransform();
protected double[] matrix = new double[6];
// Original user->device translation as transform and point
//public AffineTransform origTransform = new AffineTransform();
public Point origPoint = new Point(0, 0);
// Print debug output or not
protected static final boolean debugOutput = "1".equals(org.apache.harmony.awt.Utils.getSystemProperty("g2d.debug")); //$NON-NLS-1$ //$NON-NLS-2$
    // Constructors

    /** Creates a graphics context with no initial translation or clip. */
    protected CommonGraphics2D() {
    }

    /** Creates a graphics context translated by (tx, ty), with no clip. */
    protected CommonGraphics2D(int tx, int ty) {
        this(tx, ty, null);
    }

    /**
     * Creates a graphics context translated by (tx, ty) with the given
     * initial clip (may be null for "no clip").
     *
     * NOTE(review): this constructor calls the overridable methods
     * setTransform()/setClip(); subclasses overriding those run before their
     * own fields are initialized — verify each subclass tolerates this.
     */
    protected CommonGraphics2D(int tx, int ty, MultiRectArea clip) {
        setTransform(AffineTransform.getTranslateInstance(tx, ty));
        //origTransform = AffineTransform.getTranslateInstance(tx, ty);
        origPoint = new Point(tx, ty);
        setClip(clip);
    }
// Public methods
    /** Merges the given hints into this context's current rendering hints. */
    @Override
    public void addRenderingHints(Map<?,?> hints) {
        this.hints.putAll(hints);
    }
@Override
public void clearRect(int x, int y, int width, int height) {
Color c = getColor();
Paint p = getPaint();
setColor(getBackground());
fillRect(x, y, width, height);
setColor(c);
setPaint(p);
if (debugOutput) {
System.err.println("CommonGraphics2D.clearRect("+x+", "+y+", "+width+", "+height+")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$
}
}
@Override
public void clipRect(int x, int y, int width, int height) {
clip(new Rectangle(x, y, width, height));
}
    /**
     * Intersects the current clip with shape {@code s}, converting it to a
     * device-space MultiRectArea first. Passing null clears the clip entirely.
     */
    @Override
    public void clip(Shape s) {
        if (s == null) {
            // null means "remove the clip", per Graphics2D.clip contract.
            clip = null;
            return;
        }

        MultiRectArea mra = null;

        if (s instanceof MultiRectArea) {
            // Already rectangle-based: copy and shift by the current translation.
            mra = new MultiRectArea((MultiRectArea)s);
            mra.translate((int)transform.getTranslateX(), (int)transform.getTranslateY());
        } else {
            int type = transform.getType();
            // Fast path: plain rectangle under identity/translation needs no
            // rasterization, only an optional integer shift.
            if(s instanceof Rectangle && (type == AffineTransform.TYPE_IDENTITY ||
               type == AffineTransform.TYPE_TRANSLATION )) {
                mra = new MultiRectArea((Rectangle)s);
                if(type == AffineTransform.TYPE_TRANSLATION){
                    mra.translate((int)transform.getTranslateX(), (int)transform.getTranslateY());
                }
            } else {
                // General case: transform the shape, then rasterize it into
                // rectangles with 0.5-pixel flatness.
                s = transform.createTransformedShape(s);
                mra = jsr.rasterize(s, 0.5);
            }
        }

        if (clip == null) {
            // No existing clip: the new area becomes the clip as-is.
            setTransformedClip(mra);
        } else {
            // Otherwise intersect in device space.
            clip.intersect(mra);
            setTransformedClip(clip);
        }
    }
    /**
     * Releases native resources. Intentionally a no-op here: this pure-Java
     * base class holds none; subclasses backed by native surfaces override it.
     */
    @Override
    public void dispose() {
        // Do nothing for Java only classes
    }
/***************************************************************************
*
* Draw methods
*
***************************************************************************/
    /**
     * Outlines shape {@code s} with the current stroke.
     *
     * Thin solid/dashed BasicStrokes (width <= 1) take a fast path: the shape
     * is flattened (flatness 0.5) and each segment is rasterized as a line.
     * Everything else is stroked into a filled outline shape, transformed,
     * and rasterized as an area.
     */
    @Override
    public void draw(Shape s) {
        if (stroke instanceof BasicStroke && ((BasicStroke)stroke).getLineWidth() <= 1) {
            //TODO: Think about drawing the shape in one fillMultiRectArea call
            BasicStroke bstroke = (BasicStroke)stroke;
            // Dash pattern, if any, is carried through the line rasterizer.
            JavaLineRasterizer.LineDasher ld = (bstroke.getDashArray() == null)?null:new JavaLineRasterizer.LineDasher(bstroke.getDashArray(), bstroke.getDashPhase());
            // Flattened iterator: only MOVETO/LINETO/CLOSE segments appear below.
            PathIterator pi = s.getPathIterator(transform, 0.5);
            float []points = new float[6];
            int x1 = Integer.MIN_VALUE;
            int y1 = Integer.MIN_VALUE;
            // (cx1, cy1) remember the subpath start for SEG_CLOSE.
            int cx1 = Integer.MIN_VALUE;
            int cy1 = Integer.MIN_VALUE;
            while (!pi.isDone()) {
                switch (pi.currentSegment(points)) {
                    case PathIterator.SEG_MOVETO:
                        x1 = (int)Math.floor(points[0]);
                        y1 = (int)Math.floor(points[1]);
                        cx1 = x1;
                        cy1 = y1;
                        break;
                    case PathIterator.SEG_LINETO:
                        int x2 = (int)Math.floor(points[0]);
                        int y2 = (int)Math.floor(points[1]);
                        fillMultiRectArea(JavaLineRasterizer.rasterize(x1, y1, x2, y2, null, ld, false));
                        x1 = x2;
                        y1 = y2;
                        break;
                    case PathIterator.SEG_CLOSE:
                        // Close by drawing back to the subpath start.
                        // (x2/y2 are the locals declared in the LINETO case;
                        // Java switch cases share one scope.)
                        x2 = cx1;
                        y2 = cy1;
                        fillMultiRectArea(JavaLineRasterizer.rasterize(x1, y1, x2, y2, null, ld, false));
                        x1 = x2;
                        y1 = y2;
                        break;
                }
                pi.next();
            }
        } else {
            // General strokes: convert the outline to a fill problem.
            s = stroke.createStrokedShape(s);
            s = transform.createTransformedShape(s);
            fillMultiRectArea(jsr.rasterize(s, 0.5));
        }
    }
@Override
public void drawArc(int x, int y, int width, int height, int sa, int ea) {
if (stroke instanceof BasicStroke && ((BasicStroke)stroke).getLineWidth() <= 1 &&
((BasicStroke)stroke).getDashArray() == null &&
(transform.isIdentity() || transform.getType() == AffineTransform.TYPE_TRANSLATION)) {
Point p = new Point(x, y);
transform.transform(p, p);
MultiRectArea mra = JavaArcRasterizer.rasterize(x, y, width, height, sa, ea, clip);
fillMultiRectArea(mra);
return;
}
draw(new Arc2D.Float(x, y, width, height, sa, ea, Arc2D.OPEN));
}
    /**
     * Draws {@code image} with its top-left corner at (x, y), filling
     * transparent pixels with {@code bgcolor} when non-null.
     *
     * @return true when the image is fully loaded (or null); false when an
     *         OffscreenImage reported an error. Partially loaded images
     *         (SOMEBITS) are still blitted but return the "done" flag.
     */
    @Override
    public boolean drawImage(Image image, int x, int y, Color bgcolor,
            ImageObserver imageObserver) {

        if(image == null) {
            return true;
        }

        boolean done = false;
        boolean somebits = false;
        Surface srcSurf = null;
        if(image instanceof OffscreenImage){
            // Asynchronously produced image: check its load state first.
            OffscreenImage oi = (OffscreenImage) image;
            if((oi.getState() & ImageObserver.ERROR) != 0) {
                return false;
            }
            done = oi.prepareImage(imageObserver);
            somebits = (oi.getState() & ImageObserver.SOMEBITS) != 0;
            srcSurf = oi.getImageSurface();
        }else{
            done = true;
            srcSurf = Surface.getImageSurface(image);
        }

        if(done || somebits) {
            int w = srcSurf.getWidth();
            int h = srcSurf.getHeight();
            // Clone the transform so the blitter cannot mutate our state.
            blitter.blit(0, 0, srcSurf, x, y, dstSurf, w, h, (AffineTransform) transform.clone(),
                    composite, bgcolor, clip);
        }
        return done;
    }
@Override
public boolean drawImage(Image image, int x, int y, ImageObserver imageObserver) {
    // Convenience overload: draw with no background fill color.
    return drawImage(image, x, y, null, imageObserver);
}
@Override
public boolean drawImage(Image image, int x, int y, int width, int height,
        Color bgcolor, ImageObserver imageObserver) {
    // Draws image scaled into a width x height rectangle at (x, y).
    // Returns true when the image data is complete; null image or a
    // zero-sized destination is a successful no-op.
    if(image == null) {
        return true;
    }
    if(width == 0 || height == 0) {
        return true;
    }
    boolean done = false;
    boolean somebits = false;
    Surface srcSurf = null;
    if(image instanceof OffscreenImage){
        // Asynchronously produced image: fail on error, else draw what is ready.
        OffscreenImage oi = (OffscreenImage) image;
        if((oi.getState() & ImageObserver.ERROR) != 0) {
            return false;
        }
        done = oi.prepareImage(imageObserver);
        somebits = (oi.getState() & ImageObserver.SOMEBITS) != 0;
        srcSurf = oi.getImageSurface();
    }else{
        done = true;
        srcSurf = Surface.getImageSurface(image);
    }
    if(done || somebits) {
        int w = srcSurf.getWidth();
        int h = srcSurf.getHeight();
        if(w == width && h == height){
            // No scaling needed: plain blit under the current transform.
            blitter.blit(0, 0, srcSurf, x, y, dstSurf, w, h,
                    (AffineTransform) transform.clone(),
                    composite, bgcolor, clip);
        }else{
            // Scale source to the requested destination size via an extra
            // per-blit transform passed alongside the graphics transform.
            AffineTransform xform = new AffineTransform();
            xform.setToScale((float)width / w, (float)height / h);
            blitter.blit(0, 0, srcSurf, x, y, dstSurf, w, h,
                    (AffineTransform) transform.clone(),
                    xform, composite, bgcolor, clip);
        }
    }
    return done;
}
@Override
public boolean drawImage(Image image, int x, int y, int width, int height,
        ImageObserver imageObserver) {
    // Convenience overload: scaled draw with no background fill color.
    return drawImage(image, x, y, width, height, null, imageObserver);
}
@Override
public boolean drawImage(Image image, int dx1, int dy1, int dx2, int dy2,
        int sx1, int sy1, int sx2, int sy2, Color bgcolor,
        ImageObserver imageObserver) {
    // Draws the source sub-rectangle (sx1,sy1)-(sx2,sy2) into the destination
    // rectangle (dx1,dy1)-(dx2,dy2), scaling if the sizes differ. Degenerate
    // (zero-extent) source or destination rectangles are a successful no-op.
    if(image == null) {
        return true;
    }
    if(dx1 == dx2 || dy1 == dy2 || sx1 == sx2 || sy1 == sy2) {
        return true;
    }
    boolean done = false;
    boolean somebits = false;
    Surface srcSurf = null;
    if(image instanceof OffscreenImage){
        // Asynchronously produced image: fail on error, else draw what is ready.
        OffscreenImage oi = (OffscreenImage) image;
        if((oi.getState() & ImageObserver.ERROR) != 0) {
            return false;
        }
        done = oi.prepareImage(imageObserver);
        somebits = (oi.getState() & ImageObserver.SOMEBITS) != 0;
        srcSurf = oi.getImageSurface();
    }else{
        done = true;
        srcSurf = Surface.getImageSurface(image);
    }
    if(done || somebits) {
        // NOTE(review): extents are computed as p2 - p1 and may be negative
        // when the rectangles are specified with flipped corners; it is not
        // visible here whether blitter handles negative sizes as a mirror —
        // TODO confirm against the blitter implementation.
        int dstX = dx1;
        int dstY = dy1;
        int srcX = sx1;
        int srcY = sy1;
        int dstW = dx2 - dx1;
        int dstH = dy2 - dy1;
        int srcW = sx2 - sx1;
        int srcH = sy2 - sy1;
        if(srcW == dstW && srcH == dstH){
            // Same size: plain copy under the current transform.
            blitter.blit(srcX, srcY, srcSurf, dstX, dstY, dstSurf, srcW, srcH,
                    (AffineTransform) transform.clone(),
                    composite, bgcolor, clip);
        }else{
            // Different size: apply an extra scaling transform for the blit.
            AffineTransform xform = new AffineTransform();
            xform.setToScale((float)dstW / srcW, (float)dstH / srcH);
            blitter.blit(srcX, srcY, srcSurf, dstX, dstY, dstSurf, srcW, srcH,
                    (AffineTransform) transform.clone(),
                    xform, composite, bgcolor, clip);
        }
    }
    return done;
}
@Override
public boolean drawImage(Image image, int dx1, int dy1, int dx2, int dy2,
        int sx1, int sy1, int sx2, int sy2, ImageObserver imageObserver) {
    // Convenience overload: sub-rectangle draw with no background fill color.
    return drawImage(image, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, null,
            imageObserver);
}
@Override
public void drawImage(BufferedImage bufImage, BufferedImageOp op,
        int x, int y) {
    // Draws bufImage at (x, y) after applying the given image operation.
    // A null op means plain drawing; a null image is a no-op.
    if(bufImage == null) {
        return;
    }
    if(op == null) {
        drawImage(bufImage, x, y, null);
    } else if(op instanceof AffineTransformOp){
        // Special-case: an affine op is folded into the blit as an extra
        // transform instead of materializing a filtered copy.
        // NOTE(review): the op's interpolation type is not consulted here —
        // presumably the blitter decides interpolation; TODO confirm.
        AffineTransformOp atop = (AffineTransformOp) op;
        AffineTransform xform = atop.getTransform();
        Surface srcSurf = Surface.getImageSurface(bufImage);
        int w = srcSurf.getWidth();
        int h = srcSurf.getHeight();
        blitter.blit(0, 0, srcSurf, x, y, dstSurf, w, h,
                (AffineTransform) transform.clone(), xform,
                composite, null, clip);
    } else {
        // General op: filter into a new image first, then blit the result.
        bufImage = op.filter(bufImage, null);
        Surface srcSurf = Surface.getImageSurface(bufImage);
        int w = srcSurf.getWidth();
        int h = srcSurf.getHeight();
        blitter.blit(0, 0, srcSurf, x, y, dstSurf, w, h,
                (AffineTransform) transform.clone(),
                composite, null, clip);
    }
}
@Override
public boolean drawImage(Image image, AffineTransform trans,
        ImageObserver imageObserver) {
    // Draws image through an additional transform 'trans' applied after the
    // current graphics transform. Returns true when the image is complete.
    if(image == null) {
        return true;
    }
    if(trans == null || trans.isIdentity()) {
        // Identity extra transform: fall back to the simple draw path.
        return drawImage(image, 0, 0, imageObserver);
    }
    boolean done = false;
    boolean somebits = false;
    Surface srcSurf = null;
    if(image instanceof OffscreenImage){
        // Asynchronously produced image: fail on error, else draw what is ready.
        OffscreenImage oi = (OffscreenImage) image;
        if((oi.getState() & ImageObserver.ERROR) != 0) {
            return false;
        }
        done = oi.prepareImage(imageObserver);
        somebits = (oi.getState() & ImageObserver.SOMEBITS) != 0;
        srcSurf = oi.getImageSurface();
    }else{
        done = true;
        srcSurf = Surface.getImageSurface(image);
    }
    if(done || somebits) {
        int w = srcSurf.getWidth();
        int h = srcSurf.getHeight();
        // Compose: device transform first, then the caller-supplied one
        // (concatenate applies 'trans' before the clone's own mapping).
        AffineTransform xform = (AffineTransform) transform.clone();
        xform.concatenate(trans);
        blitter.blit(0, 0, srcSurf, 0, 0, dstSurf, w, h, xform, composite,
                null, clip);
    }
    return done;
}
@Override
public void drawLine(int x1, int y1, int x2, int y2) {
    if (debugOutput) {
        System.err.println("CommonGraphics2D.drawLine("+x1+", "+y1+", "+x2+", "+y2+")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$
    }
    // Fast path: a thin (<= 1px) BasicStroke can be rasterized directly,
    // including dashed strokes via LineDasher; endpoints are mapped through
    // the current transform first.
    if (stroke instanceof BasicStroke && ((BasicStroke)stroke).getLineWidth() <= 1) {
        BasicStroke bstroke = (BasicStroke)stroke;
        Point p1 = new Point(x1, y1);
        Point p2 = new Point(x2, y2);
        transform.transform(p1, p1);
        transform.transform(p2, p2);
        // null dasher means a solid line.
        JavaLineRasterizer.LineDasher ld = (bstroke.getDashArray() == null)?null:new JavaLineRasterizer.LineDasher(bstroke.getDashArray(), bstroke.getDashPhase());
        MultiRectArea mra = JavaLineRasterizer.rasterize(p1.x, p1.y, p2.x, p2.y, null, ld, false);
        fillMultiRectArea(mra);
        return;
    }
    // Wide strokes go through the general shape-drawing pipeline.
    draw(new Line2D.Float(x1, y1, x2, y2));
}
@Override
public void drawOval(int x, int y, int width, int height) {
    // Fast path: a thin (<= 1px), non-dashed BasicStroke under an identity or
    // pure-translation transform is rasterized directly as a full 360° arc.
    if (stroke instanceof BasicStroke && ((BasicStroke)stroke).getLineWidth() <= 1 &&
            ((BasicStroke)stroke).getDashArray() == null &&
            (transform.isIdentity() || transform.getType() == AffineTransform.TYPE_TRANSLATION)) {
        Point p = new Point(x, y);
        transform.transform(p, p);
        // Uses the transformed origin so the translation is honored.
        MultiRectArea mra = JavaArcRasterizer.rasterize(p.x, p.y, width, height, 0, 360, clip);
        fillMultiRectArea(mra);
        return;
    }
    // General case: delegate to the shape pipeline.
    draw(new Ellipse2D.Float(x, y, width, height));
}
@Override
public void drawPolygon(int[] xpoints, int[] ypoints, int npoints) {
draw(new Polygon(xpoints, ypoints, npoints));
}
@Override
public void drawPolygon(Polygon polygon) {
draw(polygon);
}
@Override
public void drawPolyline(int[] xpoints, int[] ypoints, int npoints) {
    // Render the open polyline as independent segments between each pair of
    // consecutive vertices; fewer than two points draws nothing.
    for (int i = 1; i < npoints; i++) {
        drawLine(xpoints[i - 1], ypoints[i - 1], xpoints[i], ypoints[i]);
    }
}
@Override
public void drawRenderableImage(RenderableImage img, AffineTransform xform) {
    // Renders a RenderableImage through the given transform. When the
    // transform carries a scale, the scaling is delegated to
    // createScaledRendering for quality, and the scale is then divided back
    // out of the transform so it is not applied a second time.
    if (img == null) {
        return;
    }
    double scaleX = xform.getScaleX();
    double scaleY = xform.getScaleY();
    if (scaleX == 1 && scaleY == 1) {
        drawRenderedImage(img.createDefaultRendering(), xform);
    } else {
        int width = (int)Math.round(img.getWidth()*scaleX);
        int height = (int)Math.round(img.getHeight()*scaleY);
        xform = (AffineTransform)xform.clone();
        // Undo the scale in the transform: the rendering below is already
        // produced at the scaled size. The previous scale(1, 1) was a no-op,
        // so the scale factor was effectively applied twice.
        xform.scale(1/scaleX, 1/scaleY);
        drawRenderedImage(img.createScaledRendering(width, height, null), xform);
    }
}
@Override
public void drawRenderedImage(RenderedImage rimg, AffineTransform xform) {
    // Draws a RenderedImage through the given transform; a null image is a
    // no-op. If the instance is already an Image it is drawn directly,
    // otherwise its data is copied into a BufferedImage first.
    if (rimg == null) {
        return;
    }
    Image img = null;
    if (rimg instanceof Image) {
        img = (Image)rimg;
    } else {
        //TODO: Create new class to provide Image interface for RenderedImage or rewrite this method
        img = new BufferedImage(rimg.getColorModel(), rimg.copyData(null), false, null);
    }
    drawImage(img, xform, null);
}
@Override
public void drawRoundRect(int x, int y, int width, int height, int arcWidth, int arcHeight) {
if (debugOutput) {
System.err.println("CommonGraphics2D.drawRoundRect("+x+", "+y+", "+width+", "+height+","+arcWidth+", "+arcHeight+")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$ //$NON-NLS-6$ //$NON-NLS-7$
}
draw(new RoundRectangle2D.Float(x, y, width, height, arcWidth, arcHeight));
}
/***************************************************************************
*
* String methods
*
***************************************************************************/
@Override
public void drawString(AttributedCharacterIterator iterator, float x, float y) {
GlyphVector gv = font.createGlyphVector(frc, iterator);
drawGlyphVector(gv, x, y);
}
@Override
public void drawString(AttributedCharacterIterator iterator, int x, int y) {
drawString(iterator, (float)x, (float)y);
}
@Override
public void drawString(String str, int x, int y) {
drawString(str, (float)x, (float)y);
}
/***************************************************************************
*
* Fill methods
*
***************************************************************************/
@Override
public void fill(Shape s) {
    // Fills a shape: transform to device space, scan-convert with the shape
    // rasterizer at 0.5 flatness, then fill the resulting rectangle set with
    // the current paint.
    s = transform.createTransformedShape(s);
    MultiRectArea mra = jsr.rasterize(s, 0.5);
    fillMultiRectArea(mra);
}
@Override
public void fillArc(int x, int y, int width, int height, int sa, int ea) {
fill(new Arc2D.Float(x, y, width, height, sa, ea, Arc2D.PIE));
}
@Override
public void fillOval(int x, int y, int width, int height) {
fill(new Ellipse2D.Float(x, y, width, height));
}
@Override
public void fillPolygon(int[] xpoints, int[] ypoints, int npoints) {
fill(new Polygon(xpoints, ypoints, npoints));
}
@Override
public void fillPolygon(Polygon polygon) {
fill(polygon);
}
@Override
public void fillRect(int x, int y, int width, int height) {
if (debugOutput) {
System.err.println("CommonGraphics2D.fillRect("+x+", "+y+", "+width+", "+height+")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$
}
fill(new Rectangle(x, y, width, height));
}
@Override
public void fillRoundRect(int x, int y, int width, int height, int arcWidth, int arcHeight) {
if (debugOutput) {
System.err.println("CommonGraphics2D.fillRoundRect("+x+", "+y+", "+width+", "+height+","+arcWidth+", "+arcHeight+")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$ //$NON-NLS-6$ //$NON-NLS-7$
}
fill(new RoundRectangle2D.Float(x, y, width, height, arcWidth, arcHeight));
}
/***************************************************************************
*
* Get methods
*
***************************************************************************/
@Override
public Color getBackground() {
return bgColor;
}
@Override
public Shape getClip() {
    // Returns the clip in user space, or null when unclipped. The clip is
    // stored in device space, so the transform's translation is subtracted.
    // NOTE(review): only the translation component is undone; under a
    // scaling/rotating transform this is not a full inverse — TODO confirm
    // callers only rely on this with translation-only transforms.
    if (clip == null) {
        return null;
    }
    MultiRectArea res = new MultiRectArea(clip);
    res.translate(-Math.round((float)transform.getTranslateX()), -Math.round((float)transform.getTranslateY()));
    return res;
}
@Override
public Rectangle getClipBounds() {
    // Returns the bounding box of the clip in user space, or null when
    // unclipped. Clones the device-space bounds, then subtracts the
    // transform's translation (mirrors getClip()).
    if (clip == null) {
        return null;
    }
    Rectangle res = (Rectangle) clip.getBounds().clone();
    res.translate(-Math.round((float)transform.getTranslateX()), -Math.round((float)transform.getTranslateY()));
    return res;
}
@Override
public Color getColor() {
return fgColor;
}
@Override
public Composite getComposite() {
return composite;
}
@Override
public Font getFont() {
return font;
}
@SuppressWarnings("deprecation")
@Override
public FontMetrics getFontMetrics(Font font) {
return Toolkit.getDefaultToolkit().getFontMetrics(font);
}
@Override
public FontRenderContext getFontRenderContext() {
    // Lazily builds and caches the FontRenderContext from the device's
    // default + normalizing transforms and the current text rendering hints.
    AffineTransform at;
    if (frc == null){
        GraphicsConfiguration gc = getDeviceConfiguration();
        if (gc != null){
            at = gc.getDefaultTransform();
            at.concatenate(gc.getNormalizingTransform());
        }
        else
            // No device configuration available: an identity context transform.
            at = null;
        boolean isAa = (hints.get(RenderingHints.KEY_TEXT_ANTIALIASING) ==
                RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        boolean isFm = (hints.get(RenderingHints.KEY_FRACTIONALMETRICS) ==
                RenderingHints.VALUE_FRACTIONALMETRICS_ON);
        // NOTE(review): the cached frc is not invalidated when hints change
        // afterwards — stale antialiasing flags are possible; TODO confirm.
        frc = new FontRenderContext(at,isAa,isFm);
    }
    return frc;
}
@Override
public Paint getPaint() {
return paint;
}
@Override
public Object getRenderingHint(RenderingHints.Key key) {
return hints.get(key);
}
@Override
public RenderingHints getRenderingHints() {
return hints;
}
@Override
public Stroke getStroke() {
return stroke;
}
@Override
public AffineTransform getTransform() {
return (AffineTransform)transform.clone();
}
@Override
public boolean hit(Rectangle rect, Shape s, boolean onStroke) {
//TODO: Implement method....
return false;
}
/***************************************************************************
*
* Transformation methods
*
***************************************************************************/
@Override
public void rotate(double theta) {
transform.rotate(theta);
transform.getMatrix(matrix);
}
@Override
public void rotate(double theta, double x, double y) {
transform.rotate(theta, x, y);
transform.getMatrix(matrix);
}
@Override
public void scale(double sx, double sy) {
transform.scale(sx, sy);
transform.getMatrix(matrix);
}
@Override
public void shear(double shx, double shy) {
transform.shear(shx, shy);
transform.getMatrix(matrix);
}
@Override
public void transform(AffineTransform at) {
transform.concatenate(at);
transform.getMatrix(matrix);
}
@Override
public void translate(double tx, double ty) {
if (debugOutput) {
System.err.println("CommonGraphics2D.translate("+tx+", "+ty+")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
transform.translate(tx, ty);
transform.getMatrix(matrix);
}
@Override
public void translate(int tx, int ty) {
if (debugOutput) {
System.err.println("CommonGraphics2D.translate("+tx+", "+ty+")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
transform.translate(tx, ty);
transform.getMatrix(matrix);
}
/***************************************************************************
*
* Set methods
*
***************************************************************************/
@Override
public void setBackground(Color color) {
bgColor = color;
}
@Override
public void setClip(int x, int y, int width, int height) {
setClip(new Rectangle(x, y, width, height));
}
@Override
public void setClip(Shape s) {
    // Replaces the clip with shape s (null removes clipping). The clip is
    // kept in device space, so user-space input is shifted/transformed by
    // the current transform before being stored.
    if (s == null) {
        setTransformedClip(null);
        if (debugOutput) {
            System.err.println("CommonGraphics2D.setClip(null)"); //$NON-NLS-1$
        }
        return;
    }
    if (debugOutput) {
        System.err.println("CommonGraphics2D.setClip("+s.getBounds()+")"); //$NON-NLS-1$ //$NON-NLS-2$
    }
    if (s instanceof MultiRectArea) {
        // Already rectangle-based: copy and shift by the transform's translation.
        MultiRectArea nclip = new MultiRectArea((MultiRectArea)s);
        nclip.translate(Math.round((float)transform.getTranslateX()), Math.round((float)transform.getTranslateY()));
        setTransformedClip(nclip);
    } else {
        int type = transform.getType();
        if(s instanceof Rectangle && (type == AffineTransform.TYPE_IDENTITY ||
           type == AffineTransform.TYPE_TRANSLATION )) {
            // Rectangle under identity/translation: avoid rasterization.
            MultiRectArea nclip = new MultiRectArea((Rectangle)s);
            if(type == AffineTransform.TYPE_TRANSLATION){
                nclip.translate((int)transform.getTranslateX(), (int)transform.getTranslateY());
            }
            setTransformedClip(nclip);
        } else {
            // General shape/transform: transform then scan-convert the shape.
            s = transform.createTransformedShape(s);
            setTransformedClip(jsr.rasterize(s, 0.5));
        }
    }
}
@Override
public void setColor(Color color) {
if (color != null) {
fgColor = color;
paint = color;
}
}
@Override
public void setComposite(Composite composite) {
this.composite = composite;
}
@Override
public void setFont(Font font) {
this.font = font;
}
@Override
public void setPaint(Paint paint) {
if (paint == null)
return;
this.paint = paint;
if (paint instanceof Color) {
fgColor = (Color)paint;
}
}
@Override
public void setPaintMode() {
composite = AlphaComposite.SrcOver;
}
@Override
public void setRenderingHint(RenderingHints.Key key, Object value) {
hints.put(key, value);
}
@Override
public void setRenderingHints(Map<?,?> hints) {
this.hints.clear();
this.hints.putAll(DEFAULT_RENDERING_HINTS);
this.hints.putAll(hints);
}
@Override
public void setStroke(Stroke stroke) {
this.stroke = stroke;
}
@Override
public void setTransform(AffineTransform transform) {
    // NOTE(review): stores the caller's instance directly (no defensive
    // copy); external mutation of the argument afterwards would affect this
    // graphics. getTransform() returns a clone, so round-trips are safe.
    this.transform = transform;
    // Keep the cached flat matrix in sync with the new transform.
    transform.getMatrix(matrix);
}
@Override
public void setXORMode(Color color) {
composite = new XORComposite(color);
}
// Protected methods
protected void setTransformedClip(MultiRectArea clip) {
this.clip = clip;
}
/**
 * Fills the given MultiRectArea with the current paint, after intersecting
 * it with the current clip. Dispatches to fillMultiRectAreaColor for solid
 * Color paints and to fillMultiRectAreaPaint for all other Paint types.
 * @param mra MultiRectArea to fill (mutated in place by the clip intersection)
 */
protected void fillMultiRectArea(MultiRectArea mra) {
    if (clip != null) {
        mra.intersect(clip);
    }
    // Return if all stuff is clipped
    // NOTE(review): rect[0] appears to be MultiRectArea's internal header
    // slot holding the used length of the rect array; < 5 presumably means
    // "no rectangles remain" — TODO confirm against MultiRectArea's layout.
    if (mra.rect[0] < 5) {
        return;
    }
    if (debugOutput) {
        System.err.println("CommonGraphics2D.fillMultiRectArea("+mra+")"); //$NON-NLS-1$ //$NON-NLS-2$
    }
    if (paint instanceof Color){
        fillMultiRectAreaColor(mra);
    }else{
        fillMultiRectAreaPaint(mra);
    }
}
/**
* This method fills the given MultiRectArea with solid color.
* @param mra MultiRectArea to fill
*/
protected void fillMultiRectAreaColor(MultiRectArea mra) {
fillMultiRectAreaPaint(mra);
}
/**
 * Fills the given MultiRectArea using the current Paint. A PaintContext is
 * created for the area's bounding box, its raster is wrapped in a temporary
 * ImageSurface, and the surface is blitted onto the destination clipped to
 * the MultiRectArea itself.
 * @param mra MultiRectArea to fill
 */
protected void fillMultiRectAreaPaint(MultiRectArea mra) {
    Rectangle rec = mra.getBounds();
    int x = rec.x;
    int y = rec.y;
    int w = rec.width;
    int h = rec.height;
    // Nothing to paint for an empty area.
    if(w <= 0 || h <= 0) {
        return;
    }
    PaintContext pc = paint.createContext(null, rec, rec, transform, hints);
    Raster r = pc.getRaster(x, y, w, h);
    WritableRaster wr;
    if(r instanceof WritableRaster){
        wr = (WritableRaster) r;
    }else{
        // Read-only raster: copy into a writable one for the ImageSurface.
        wr = r.createCompatibleWritableRaster();
        wr.setRect(r);
    }
    // The MultiRectArea is passed as the clip so only its rectangles are filled.
    Surface srcSurf = new ImageSurface(pc.getColorModel(), wr);
    blitter.blit(0, 0, srcSurf, x, y, dstSurf, w, h,
            composite, null, mra);
    srcSurf.dispose();
}
/**
 * Copies this graphics context's state (clip, colors, paint, composite,
 * stroke, font, transform, origin) into the given instance. Used by
 * create() to produce an independent copy: the clip and transform are
 * deep-copied so later mutation of either context does not leak into the
 * other.
 *
 * @param copy Graphics class to copy
 */
protected void copyInternalFields(CommonGraphics2D copy) {
    if (clip == null) {
        copy.setTransformedClip(null);
    } else {
        // Deep-copy the clip; MultiRectArea is mutable.
        copy.setTransformedClip(new MultiRectArea(clip));
    }
    copy.setBackground(bgColor);
    copy.setColor(fgColor);
    copy.setPaint(paint);
    copy.setComposite(composite);
    copy.setStroke(stroke);
    copy.setFont(font);
    // Fresh AffineTransform: setTransform stores the reference it is given.
    copy.setTransform(new AffineTransform(transform));
    //copy.origTransform = new AffineTransform(origTransform);
    copy.origPoint = new Point(origPoint);
}
public void flush(){}
}
|
googleapis/google-cloud-java | 36,127 | java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/UpdateKmsConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/kms.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1;
/**
*
*
* <pre>
* UpdateKmsConfigRequest updates a KMS Config.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateKmsConfigRequest}
*/
public final class UpdateKmsConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.UpdateKmsConfigRequest)
UpdateKmsConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateKmsConfigRequest.newBuilder() to construct.
private UpdateKmsConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateKmsConfigRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateKmsConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.KmsProto
.internal_static_google_cloud_netapp_v1_UpdateKmsConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.KmsProto
.internal_static_google_cloud_netapp_v1_UpdateKmsConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.UpdateKmsConfigRequest.class,
com.google.cloud.netapp.v1.UpdateKmsConfigRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* KmsConfig resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* KmsConfig resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* KmsConfig resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int KMS_CONFIG_FIELD_NUMBER = 2;
private com.google.cloud.netapp.v1.KmsConfig kmsConfig_;
/**
*
*
* <pre>
* Required. The KmsConfig being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the kmsConfig field is set.
*/
@java.lang.Override
public boolean hasKmsConfig() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The KmsConfig being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The kmsConfig.
*/
@java.lang.Override
public com.google.cloud.netapp.v1.KmsConfig getKmsConfig() {
return kmsConfig_ == null
? com.google.cloud.netapp.v1.KmsConfig.getDefaultInstance()
: kmsConfig_;
}
/**
*
*
* <pre>
* Required. The KmsConfig being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.netapp.v1.KmsConfigOrBuilder getKmsConfigOrBuilder() {
return kmsConfig_ == null
? com.google.cloud.netapp.v1.KmsConfig.getDefaultInstance()
: kmsConfig_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getKmsConfig());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getKmsConfig());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.netapp.v1.UpdateKmsConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.netapp.v1.UpdateKmsConfigRequest other =
(com.google.cloud.netapp.v1.UpdateKmsConfigRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasKmsConfig() != other.hasKmsConfig()) return false;
if (hasKmsConfig()) {
if (!getKmsConfig().equals(other.getKmsConfig())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasKmsConfig()) {
hash = (37 * hash) + KMS_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getKmsConfig().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.netapp.v1.UpdateKmsConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* UpdateKmsConfigRequest updates a KMS Config.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateKmsConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.UpdateKmsConfigRequest)
com.google.cloud.netapp.v1.UpdateKmsConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.KmsProto
.internal_static_google_cloud_netapp_v1_UpdateKmsConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.KmsProto
.internal_static_google_cloud_netapp_v1_UpdateKmsConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.UpdateKmsConfigRequest.class,
com.google.cloud.netapp.v1.UpdateKmsConfigRequest.Builder.class);
}
// Construct using com.google.cloud.netapp.v1.UpdateKmsConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getKmsConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
kmsConfig_ = null;
if (kmsConfigBuilder_ != null) {
kmsConfigBuilder_.dispose();
kmsConfigBuilder_ = null;
}
return this;
}
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.netapp.v1.KmsProto
          .internal_static_google_cloud_netapp_v1_UpdateKmsConfigRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.netapp.v1.UpdateKmsConfigRequest getDefaultInstanceForType() {
      return com.google.cloud.netapp.v1.UpdateKmsConfigRequest.getDefaultInstance();
    }
    // Builds the message; since isInitialized() always returns true for this
    // message, the uninitialized-message exception path is effectively dead.
    @java.lang.Override
    public com.google.cloud.netapp.v1.UpdateKmsConfigRequest build() {
      com.google.cloud.netapp.v1.UpdateKmsConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds the message without the initialization check.
    @java.lang.Override
    public com.google.cloud.netapp.v1.UpdateKmsConfigRequest buildPartial() {
      com.google.cloud.netapp.v1.UpdateKmsConfigRequest result =
          new com.google.cloud.netapp.v1.UpdateKmsConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies the set fields from this builder into {@code result}, preferring a
    // live nested builder's contents over the cached field value, and mirrors
    // the builder's has-bits (0x1 = update_mask, 0x2 = kms_config) into the
    // message's bitField0_.
    private void buildPartial0(com.google.cloud.netapp.v1.UpdateKmsConfigRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.kmsConfig_ = kmsConfigBuilder_ == null ? kmsConfig_ : kmsConfigBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflective field mutators below simply delegate to
    // GeneratedMessageV3.Builder while narrowing the return type to this
    // concrete Builder so calls can be chained.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the strongly-typed merge when {@code other} is the same
    // message type; otherwise falls back to the reflective merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.netapp.v1.UpdateKmsConfigRequest) {
        return mergeFrom((com.google.cloud.netapp.v1.UpdateKmsConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges another UpdateKmsConfigRequest into this builder using standard
    // proto merge semantics: set singular message fields are merged
    // field-by-field, and unknown fields are concatenated. Merging the default
    // instance is a no-op.
    public Builder mergeFrom(com.google.cloud.netapp.v1.UpdateKmsConfigRequest other) {
      if (other == com.google.cloud.netapp.v1.UpdateKmsConfigRequest.getDefaultInstance())
        return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasKmsConfig()) {
        mergeKmsConfig(other.getKmsConfig());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always initialized: this proto3 message has no (proto2-style) required
    // fields that could be missing at the wire level.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses fields from the wire into this builder. Wire tags: 10 =
    // (field 1 << 3) | LENGTH_DELIMITED for update_mask, 18 = (field 2 << 3) |
    // LENGTH_DELIMITED for kms_config; unrecognized fields are preserved via
    // parseUnknownField. onChanged() fires in the finally block so parent
    // builders are notified even when parsing throws.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getKmsConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits for this builder: 0x1 tracks update_mask, 0x2 tracks
    // kms_config. A set bit means the field was explicitly assigned.
    private int bitField0_;

    // Cached field value; null while a live nested builder (below) owns the
    // field's state.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // If a non-default mask is already set, merge into it via the nested
        // builder; otherwise adopt the incoming value wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     * Lazily creates the nested builder for update_mask, seeding it with the
     * current value; once created, the builder owns the field state and the
     * cached {@code updateMask_} reference is dropped.
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * KmsConfig resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Cached field value; null while a live nested builder (below) owns the
    // field's state.
    private com.google.cloud.netapp.v1.KmsConfig kmsConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.netapp.v1.KmsConfig,
            com.google.cloud.netapp.v1.KmsConfig.Builder,
            com.google.cloud.netapp.v1.KmsConfigOrBuilder>
        kmsConfigBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the kmsConfig field is set.
     */
    public boolean hasKmsConfig() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The kmsConfig.
     */
    public com.google.cloud.netapp.v1.KmsConfig getKmsConfig() {
      if (kmsConfigBuilder_ == null) {
        return kmsConfig_ == null
            ? com.google.cloud.netapp.v1.KmsConfig.getDefaultInstance()
            : kmsConfig_;
      } else {
        return kmsConfigBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKmsConfig(com.google.cloud.netapp.v1.KmsConfig value) {
      if (kmsConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        kmsConfig_ = value;
      } else {
        kmsConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKmsConfig(com.google.cloud.netapp.v1.KmsConfig.Builder builderForValue) {
      if (kmsConfigBuilder_ == null) {
        kmsConfig_ = builderForValue.build();
      } else {
        kmsConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeKmsConfig(com.google.cloud.netapp.v1.KmsConfig value) {
      if (kmsConfigBuilder_ == null) {
        // If a non-default config is already set, merge into it via the nested
        // builder; otherwise adopt the incoming value wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && kmsConfig_ != null
            && kmsConfig_ != com.google.cloud.netapp.v1.KmsConfig.getDefaultInstance()) {
          getKmsConfigBuilder().mergeFrom(value);
        } else {
          kmsConfig_ = value;
        }
      } else {
        kmsConfigBuilder_.mergeFrom(value);
      }
      if (kmsConfig_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearKmsConfig() {
      bitField0_ = (bitField0_ & ~0x00000002);
      kmsConfig_ = null;
      if (kmsConfigBuilder_ != null) {
        kmsConfigBuilder_.dispose();
        kmsConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.netapp.v1.KmsConfig.Builder getKmsConfigBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getKmsConfigFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.netapp.v1.KmsConfigOrBuilder getKmsConfigOrBuilder() {
      if (kmsConfigBuilder_ != null) {
        return kmsConfigBuilder_.getMessageOrBuilder();
      } else {
        return kmsConfig_ == null
            ? com.google.cloud.netapp.v1.KmsConfig.getDefaultInstance()
            : kmsConfig_;
      }
    }
    /**
     * Lazily creates the nested builder for kms_config, seeding it with the
     * current value; once created, the builder owns the field state and the
     * cached {@code kmsConfig_} reference is dropped.
     *
     * <pre>
     * Required. The KmsConfig being updated
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.KmsConfig kms_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.netapp.v1.KmsConfig,
            com.google.cloud.netapp.v1.KmsConfig.Builder,
            com.google.cloud.netapp.v1.KmsConfigOrBuilder>
        getKmsConfigFieldBuilder() {
      if (kmsConfigBuilder_ == null) {
        kmsConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.netapp.v1.KmsConfig,
                com.google.cloud.netapp.v1.KmsConfig.Builder,
                com.google.cloud.netapp.v1.KmsConfigOrBuilder>(
                getKmsConfig(), getParentForChildren(), isClean());
        kmsConfig_ = null;
      }
      return kmsConfigBuilder_;
    }
    // Unknown-field plumbing: straight delegation, with the return type
    // narrowed to this Builder for call chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.UpdateKmsConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.UpdateKmsConfigRequest)
  // Singleton default (all-fields-unset) instance shared by all callers.
  private static final com.google.cloud.netapp.v1.UpdateKmsConfigRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.UpdateKmsConfigRequest();
  }

  public static com.google.cloud.netapp.v1.UpdateKmsConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser: delegates to Builder.mergeFrom and rewraps any parse
  // failure with the partially-built message attached for diagnostics.
  private static final com.google.protobuf.Parser<UpdateKmsConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateKmsConfigRequest>() {
        @java.lang.Override
        public UpdateKmsConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateKmsConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateKmsConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.netapp.v1.UpdateKmsConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/coherence | 36,055 | prj/coherence-core-components/src/main/java/com/tangosol/coherence/component/net/management/model/localModel/StorageManagerModel.java |
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
// ---- class: com.tangosol.coherence.component.net.management.model.localModel.StorageManagerModel
package com.tangosol.coherence.component.net.management.model.localModel;
import com.tangosol.coherence.component.util.daemon.queueProcessor.service.grid.partitionedService.PartitionedCache;
import com.tangosol.coherence.component.util.daemon.queueProcessor.service.grid.partitionedService.partitionedCache.Storage;
import com.tangosol.internal.util.VersionHelper;
import com.tangosol.net.NamedCache;
import com.tangosol.net.events.internal.StorageDispatcher;
import com.tangosol.net.internal.PartitionSize;
import com.tangosol.net.internal.PartitionSizeAggregator;
import com.tangosol.util.Base;
import com.tangosol.util.ExternalizableHelper;
import com.tangosol.util.MapIndex;
import com.tangosol.util.filter.AlwaysFilter;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
/**
* Model components implement the JMX-managed functionality of the
* corresponding MBeans without being dependent on any JMX classes and could be
* used both in-process and out-of-process (relative to an MBeanServer).
*
* The LocalModel components operate in two distinct modes: live and snapshot.
* In the live mode all model methods call corresponding methods on managed
* objects. The snapshot mode uses the _SnapshotMap to keep the attribute
* values.
*
* Every time a remote invocation is used by the RemoteModel to do a
* setAttribute or invoke call, the snapshot model is refreshed.
*/
@SuppressWarnings({"deprecation", "rawtypes", "unused", "unchecked", "ConstantConditions", "DuplicatedCode", "ForLoopReplaceableByForEach", "IfCanBeSwitch", "RedundantArrayCreation", "RedundantSuppression", "SameParameterValue", "TryFinallyCanBeTryWithResources", "TryWithIdenticalCatches", "UnnecessaryBoxing", "UnnecessaryUnboxing", "UnusedAssignment"})
public class StorageManagerModel
extends com.tangosol.coherence.component.net.management.model.LocalModel
{
    // ---- Fields declarations ----

    /**
     * Property _Storage
     *
     * The Storage object associated with this model.
     *
     * NOTE(review): this field appears unused within the visible code —
     * get_Storage() reads _StorageRef instead; presumably retained by the TDE
     * generator. Confirm before removing.
     */
    private transient Storage __m__Storage;

    /**
     * Property _StorageRef
     *
     * The Storage object associated with this model, wrapped in a
     * WeakReference to avoid resource leakage.
     */
    private transient java.lang.ref.WeakReference __m__StorageRef;

    /**
     * Property MaxQueryThresholdMillis
     *
     * NOTE(review): not read by getMaxQueryThresholdMillis(), which delegates
     * to the Storage statistic instead — confirm whether this backing field is
     * still needed.
     */
    private long __m_MaxQueryThresholdMillis;
    // Default constructor
    public StorageManagerModel()
        {
        this(null, null, true);
        }

    // Initializing constructor; when fInit is false the caller is expected to
    // invoke __init() itself (TDE subclassing pattern).
    public StorageManagerModel(String sName, com.tangosol.coherence.Component compParent, boolean fInit)
        {
        super(sName, compParent, false);
        if (fInit)
            {
            __init();
            }
        }

    // Main initializer: runs private initialization, then the state
    // initialization for public/protected properties.
    public void __init()
        {
        // private initialization
        __initPrivate();
        // state initialization: public and protected properties
        try
            {
            set_SnapshotMap(new java.util.HashMap());
            }
        catch (java.lang.Exception e)
            {
            // re-throw as a runtime exception
            throw new com.tangosol.util.WrapperException(e);
            }
        // signal the end of the initialization
        set_Constructed(true);
        }

    // Private initializer
    protected void __initPrivate()
        {
        super.__initPrivate();
        }
    //++ getter for static property _Instance
    /**
     * Getter for property _Instance.<p>
     * Auto generated. Returns a fresh, fully initialized model instance.
     */
    public static com.tangosol.coherence.Component get_Instance()
        {
        return new com.tangosol.coherence.component.net.management.model.localModel.StorageManagerModel();
        }

    //++ getter for static property _CLASS
    /**
     * Getter for property _CLASS.<p>
     * Property with auto-generated accessor that returns the Class object for a
     * given component.
     */
    public static Class get_CLASS()
        {
        Class clz;
        try
            {
            // the '/'-separated form is a TDE component path; the replace()
            // turns it into the runtime class name
            clz = Class.forName("com.tangosol.coherence/component/net/management/model/localModel/StorageManagerModel".replace('/', '.'));
            }
        catch (ClassNotFoundException e)
            {
            throw new NoClassDefFoundError(e.getMessage());
            }
        return clz;
        }

    //++ getter for autogen property _Module
    /**
     * This is an auto-generated method that returns the global [design time]
     * parent component.
     *
     * Note: the class generator will ignore any custom implementation for this
     * behavior.
     */
    private com.tangosol.coherence.Component get_Module()
        {
        return this;
        }
    // Accessor for the property "_Storage"
    /**
     * Getter for property _Storage.<p>
     * The Storage object associated with this model. Returns null when the
     * reference was never set or the Storage has been garbage collected;
     * callers below treat null as "storage gone" and report -1/0/empty.
     */
    public Storage get_Storage()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import java.lang.ref.WeakReference;
        WeakReference wr = get_StorageRef();
        return wr == null ? null : (Storage) wr.get();
        }

    // Accessor for the property "_StorageRef"
    /**
     * Getter for property _StorageRef.<p>
     * The Storage object associated with this model, wrapped in a WeakReference
     * to avoid resource leakage.
     */
    protected java.lang.ref.WeakReference get_StorageRef()
        {
        return __m__StorageRef;
        }
    // Accessor for the property "EventInterceptorInfo"
    /**
     * Getter for property EventInterceptorInfo.<p>
     * Statistics for the UEM event dispatcher. Returns an empty array when the
     * storage or its dispatcher is unavailable.
     */
    public String[] getEventInterceptorInfo()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import com.tangosol.net.events.internal.StorageDispatcher;
        Storage storage = get_Storage();
        StorageDispatcher dispatcher =
            storage == null ? null : (StorageDispatcher) storage.getEventDispatcher();
        if (dispatcher == null)
            {
            return new String[0];
            }
        return dispatcher.getStats().toStringArray();
        }

    // Accessor for the property "EventsDispatched"
    /**
     * Getter for property EventsDispatched.<p>
     * Delegates to the storage statistic; -1 when the storage is unavailable.
     */
    public long getEventsDispatched()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        Storage storage = get_Storage();
        return storage == null ? -1L : storage.getStatsEventsDispatched();
        }

    // Accessor for the property "EvictionCount"
    /**
     * Getter for property EvictionCount.<p>
     * The number of evictions from the backing map managed by this
     * StorageManager caused by entries expiry or insert operations that would
     * make the underlying backing map to cross its configured size limit.
     * Returns -1 when the storage is unavailable.
     */
    public long getEvictionCount()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        Storage storage = get_Storage();
        return storage == null ? -1L : storage.getStatsEvictions().get();
        }
    // Accessor for the property "IndexInfo"
    /**
     * Getter for property IndexInfo.<p>
     * Best-effort snapshot of the string form of every index on this storage.
     * The index map may be mutated concurrently by the service, so iteration
     * retries up to 4 times on ConcurrentModificationException; if every
     * attempt fails an empty array is returned (the list is cleared in the
     * catch block).
     */
    public String[] getIndexInfo()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import com.tangosol.util.MapIndex;
        // import java.util.ArrayList;
        // import java.util.ConcurrentModificationException;
        // import java.util.Iterator;
        // import java.util.List;
        // import java.util.Map;
        String[] asInfo = new String[0];
        Storage storage = get_Storage();
        Map mapIndex = storage == null ? null : storage.getIndexMap();
        if (mapIndex != null && !mapIndex.isEmpty())
            {
            List listInfo = new ArrayList(mapIndex.size());
            for (int cAttempts = 4; cAttempts > 0; --cAttempts)
                {
                try
                    {
                    for (Iterator iter = mapIndex.values().iterator(); iter.hasNext();)
                        {
                        MapIndex index = (MapIndex) iter.next();
                        if (index != null)
                            {
                            listInfo.add(index.toString());
                            }
                        }
                    break;
                    }
                catch (ConcurrentModificationException e)
                    {
                    // discard the partial result and retry
                    listInfo.clear();
                    }
                }
            asInfo = (String[]) listInfo.toArray(asInfo);
            }
        return asInfo;
        }

    // Accessor for the property "IndexingTotalMillis"
    /**
     * Getter for property IndexingTotalMillis.<p>
     * The total time taken to build all indices on all partitions, whether as a
     * result of addIndex() or re-distribution. Returns -1 when the storage is
     * unavailable.
     */
    public long getIndexingTotalMillis()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        Storage storage = get_Storage();
        return storage == null ? -1L : storage.getStatsIndexingTotalMillis().get();
        }

    // Accessor for the property "IndexTotalUnits"
    /**
     * Getter for property IndexTotalUnits.<p>
     * The total units used by all indices on the associated cache. Uses the
     * same best-effort retry-on-CME pattern as getIndexInfo(); returns 0 if
     * every attempt fails (the accumulator is reset in the catch block).
     */
    public long getIndexTotalUnits()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import com.tangosol.util.Base;
        // import com.tangosol.util.MapIndex;
        // import com.tangosol.util.SimpleMapIndex;
        // import java.util.ConcurrentModificationException;
        // import java.util.Iterator;
        // import java.util.Map;
        long cUnits = 0L;
        Storage storage = get_Storage();
        Map mapIndex = storage == null ? null : storage.getIndexMap();
        if (mapIndex != null && !mapIndex.isEmpty())
            {
            for (int cAttempts = 4; cAttempts > 0; --cAttempts)
                {
                try
                    {
                    for (Iterator iter = mapIndex.values().iterator(); iter.hasNext();)
                        {
                        MapIndex index = (MapIndex) iter.next();
                        if (index != null)
                            {
                            cUnits += index.getUnits();
                            }
                        }
                    break;
                    }
                catch (ConcurrentModificationException e)
                    {
                    // discard the partial sum and retry
                    cUnits = 0;
                    }
                }
            }
        return cUnits;
        }
    // Accessor for the property "InsertCount"
    /**
     * Getter for property InsertCount.<p>
     * Delegates to the storage insert statistic; -1 when the storage is
     * unavailable.
     */
    public long getInsertCount()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        Storage storage = get_Storage();
        return storage == null ? -1L : storage.getStatsInserts().get();
        }

    // Accessor for the property "ListenerFilterCount"
    /**
     * Getter for property ListenerFilterCount.<p>
     * Sums the per-member listener counts from the storage's filter-based
     * listener map. The map may be mutated concurrently, so iteration retries
     * up to 4 times on ConcurrentModificationException; returns 0 if every
     * attempt fails (the count is reset in the catch block) or if there is no
     * listener map.
     */
    public int getListenerFilterCount()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import java.util.ConcurrentModificationException;
        // import java.util.Iterator;
        // import java.util.Map;
        // import java.util.Map$Entry as java.util.Map.Entry;
        Storage storage = get_Storage();
        Map map = storage == null ? null : storage.getListenerMap();
        if (map == null)
            {
            return 0;
            }
        else
            {
            int cListeners = 0;
            for (int cAttempts = 4; cAttempts > 0; --cAttempts)
                {
                try
                    {
                    for (Iterator iter = map.entrySet().iterator(); iter.hasNext(); )
                        {
                        java.util.Map.Entry entry = (java.util.Map.Entry) iter.next();
                        Map mapMember = (Map) entry.getValue();
                        if (mapMember != null)
                            {
                            cListeners += mapMember.size();
                            }
                        }
                    break;
                    }
                catch (ConcurrentModificationException e)
                    {
                    // discard the partial count and retry
                    cListeners = 0;
                    }
                }
            return cListeners;
            }
        }

    // Accessor for the property "ListenerKeyCount"
    /**
     * Getter for property ListenerKeyCount.<p>
     * Same best-effort counting as getListenerFilterCount(), but over the
     * key-based listener map.
     */
    public int getListenerKeyCount()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import java.util.ConcurrentModificationException;
        // import java.util.Iterator;
        // import java.util.Map;
        // import java.util.Map$Entry as java.util.Map.Entry;
        Storage storage = get_Storage();
        Map map = storage == null ? null : storage.getKeyListenerMap();
        if (map == null)
            {
            return 0;
            }
        else
            {
            int cListeners = 0;
            for (int cAttempts = 4; cAttempts > 0; --cAttempts)
                {
                try
                    {
                    for (Iterator iter = map.entrySet().iterator(); iter.hasNext(); )
                        {
                        java.util.Map.Entry entry = (java.util.Map.Entry) iter.next();
                        Map mapMember = (Map) entry.getValue();
                        if (mapMember != null)
                            {
                            cListeners += mapMember.size();
                            }
                        }
                    break;
                    }
                catch (ConcurrentModificationException e)
                    {
                    // discard the partial count and retry
                    cListeners = 0;
                    }
                }
            return cListeners;
            }
        }

    // Accessor for the property "ListenerRegistrations"
    /**
     * Getter for property ListenerRegistrations.<p>
     * Delegates to the storage statistic; -1 when the storage is unavailable.
     */
    public long getListenerRegistrations()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        Storage storage = get_Storage();
        return storage == null ? -1L : storage.getStatsListenerRegistrations().get();
        }
    // Accessor for the property "LocksGranted"
    /**
     * Getter for property LocksGranted.<p>
     * The size of the storage's lease map; 0 when the storage is unavailable.
     */
    public int getLocksGranted()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import java.util.Map;
        Storage storage = get_Storage();
        Map mapLocks = storage == null ? null : storage.getLeaseMap();
        return mapLocks == null ? 0 : mapLocks.size();
        }

    // Accessor for the property "LocksPending"
    /**
     * Getter for property LocksPending.<p>
     * The number of outstanding lock requests; 0 when the storage is
     * unavailable.
     */
    public int getLocksPending()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
        // import java.util.List;
        Storage storage = get_Storage();
        List listRequests = storage == null ? null : storage.getPendingLockRequest();
        return listRequests == null ? 0 : listRequests.size();
        }
// Accessor for the property "MaxQueryDescription"
/**
* Getter for property MaxQueryDescription.<p>
*/
public String getMaxQueryDescription()
{
// import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
Storage storage = get_Storage();
return storage == null ? canonicalString(null) : storage.getStatsMaxQueryDescription();
}
// Accessor for the property "MaxQueryDurationMillis"
/**
* Getter for property MaxQueryDurationMillis.<p>
*/
public long getMaxQueryDurationMillis()
{
// import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
Storage storage = get_Storage();
return storage == null ? -1L : storage.getStatsMaxQueryDurationMillis();
}
// Accessor for the property "MaxQueryThresholdMillis"
/**
* The query statistics threshold, defining when a query have been running
* long enough to be interresting for recording.
*/
public long getMaxQueryThresholdMillis()
{
// import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
Storage storage = get_Storage();
return storage == null ? -1L : storage.getStatsMaxQueryThresholdMillis();
}
// Accessor for the property "NonOptimizedQueryAverageMillis"
/**
* Getter for property NonOptimizedQueryAverageMillis.<p>
*/
public long getNonOptimizedQueryAverageMillis()
{
// import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
Storage storage = get_Storage();
return storage == null ? -1L : storage.getStatsNonOptimizedQueryAverageMillis();
}
// Accessor for the property "NonOptimizedQueryCount"
/**
* Getter for property NonOptimizedQueryCount.<p>
*/
public long getNonOptimizedQueryCount()
{
// import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
Storage storage = get_Storage();
return storage == null ? -1L : storage.getStatsNonOptimizedQueryCount().get();
}
// Accessor for the property "NonOptimizedQueryTotalMillis"
/**
* Getter for property NonOptimizedQueryTotalMillis.<p>
*/
public long getNonOptimizedQueryTotalMillis()
{
// import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
Storage storage = get_Storage();
return storage == null ? -1L : storage.getStatsNonOptimizedQueryTotalMillis().get();
}
// Accessor for the property "OptimizedQueryAverageMillis"
/**
 * Getter for property OptimizedQueryAverageMillis.<p>
 * Returns -1 when no Storage is associated with this model.
 */
public long getOptimizedQueryAverageMillis()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    Storage storage = get_Storage();
    if (storage == null)
        {
        return -1L;
        }
    return storage.getStatsOptimizedQueryAverageMillis();
    }
// Accessor for the property "OptimizedQueryCount"
/**
 * Getter for property OptimizedQueryCount.<p>
 * Returns -1 when no Storage is associated with this model.
 */
public long getOptimizedQueryCount()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    Storage storage = get_Storage();
    if (storage == null)
        {
        return -1L;
        }
    return storage.getStatsOptimizedQueryCount().get();
    }
// Accessor for the property "OptimizedQueryTotalMillis"
/**
 * Getter for property OptimizedQueryTotalMillis.<p>
 * Returns -1 when no Storage is associated with this model.
 */
public long getOptimizedQueryTotalMillis()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    Storage storage = get_Storage();
    if (storage == null)
        {
        return -1L;
        }
    return storage.getStatsOptimizedQueryTotalMillis().get();
    }
// Accessor for the property "QueryContentionCount"
/**
 * Getter for property QueryContentionCount.<p>
 * Total number of times a query had to be re-evaluated due to a concurrent
 * update since statistics were last reset. This statistic provides a
 * measure of the impact of concurrent updates on query performance. If
 * the total number of queries is Q and the number of contentions is C then
 * the expected performance degradation factor should be no more than
 * (Q + C)/Q.
 */
public long getQueryContentionCount()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    Storage storage = get_Storage();
    if (storage == null)
        {
        return -1L;
        }
    return storage.getStatsQueryContentionCount().get();
    }
// Accessor for the property "RemoveCount"
/**
 * Getter for property RemoveCount.<p>
 * The number of removes from the backing map managed by this StorageManager
 * caused by operations such as clear, remove or invoke.
 */
public long getRemoveCount()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    Storage storage = get_Storage();
    if (storage == null)
        {
        return -1L;
        }
    return storage.getStatsRemoves().get();
    }
// Accessor for the property "ClearCount"
/**
 * Getter for property ClearCount.<p>
 * The number of `clear` operations since the last time statistics were reset.
 */
public long getClearCount()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    Storage storage = get_Storage();
    if (storage == null)
        {
        return -1L;
        }
    return storage.getStatsClears().get();
    }
// Accessor for the property "TriggerInfo"
/**
 * Getter for property TriggerInfo.<p>
 * Returns a string description of each trigger registered with the Storage,
 * or an empty array when there is no Storage or no triggers.
 */
public String[] getTriggerInfo()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;
    // import java.util.ArrayList;
    // import java.util.ConcurrentModificationException;
    // import java.util.Iterator;
    // import java.util.List;
    // import java.util.Set;

    Storage storage    = get_Storage();
    Set     setTrigger = storage == null ? null : storage.getTriggerSet();

    if (setTrigger == null || setTrigger.isEmpty())
        {
        return new String[0];
        }

    List listInfo = new ArrayList(setTrigger.size());

    // the trigger set may be mutated concurrently; retry the snapshot a
    // bounded number of times rather than locking
    int cRetries = 4;
    while (cRetries-- > 0)
        {
        try
            {
            Iterator iter = setTrigger.iterator();
            while (iter.hasNext())
                {
                listInfo.add(iter.next().toString());
                }
            break;
            }
        catch (ConcurrentModificationException e)
            {
            // discard the partial snapshot and try again
            listInfo.clear();
            }
        }

    return (String[]) listInfo.toArray(new String[0]);
    }
// Declared at the super level
/**
 * Must be supplemented at each specific Model implementation.
 *
 * Deserializes the statistics snapshot into the snapshot map. The attribute
 * order and the version gates below must exactly mirror writeExternal; a
 * mismatch would silently corrupt every subsequent attribute read from the
 * stream.
 */
public void readExternal(java.io.DataInput in)
        throws java.io.IOException
    {
    // import com.tangosol.util.Base;
    // import com.tangosol.util.ExternalizableHelper;
    // import java.util.Map;
    super.readExternal(in);
    Map mapSnapshot = get_SnapshotMap();
    mapSnapshot.put("EventInterceptorInfo", ExternalizableHelper.readStringArray(in));
    mapSnapshot.put("EventsDispatched", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("EvictionCount", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("IndexInfo", ExternalizableHelper.readStringArray(in));
    mapSnapshot.put("IndexTotalUnits", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("InsertCount", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("ListenerFilterCount", Base.makeInteger(ExternalizableHelper.readInt(in)));
    mapSnapshot.put("ListenerKeyCount", Base.makeInteger(ExternalizableHelper.readInt(in)));
    mapSnapshot.put("ListenerRegistrations", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("LocksGranted", Base.makeInteger(ExternalizableHelper.readInt(in)));
    mapSnapshot.put("LocksPending", Base.makeInteger(ExternalizableHelper.readInt(in)));
    mapSnapshot.put("MaxQueryDescription", ExternalizableHelper.readUTF(in));
    mapSnapshot.put("MaxQueryDurationMillis", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("MaxQueryThresholdMillis", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("NonOptimizedQueryAverageMillis", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("NonOptimizedQueryCount", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("NonOptimizedQueryTotalMillis", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("OptimizedQueryAverageMillis", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("OptimizedQueryCount", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("OptimizedQueryTotalMillis", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("QueryContentionCount", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("RemoveCount", Base.makeLong(ExternalizableHelper.readLong(in)));
    mapSnapshot.put("TriggerInfo", ExternalizableHelper.readStringArray(in));
    // added in 14.1.2.0.0 / 21.06 (mirrors the version gate in writeExternal)
    if (ExternalizableHelper.isVersionCompatible(in, 21, 6, 0))
        {
        mapSnapshot.put("IndexingTotalMillis", Base.makeLong(ExternalizableHelper.readLong(in)));
        }
    // added in 14.1.2.0.0 / 22.06.7 / 23.09.1
    if (ExternalizableHelper.isVersionCompatible(in, VersionHelper.VERSION_23_09_1)
        || ExternalizableHelper.isPatchCompatible(in, VersionHelper.VERSION_14_1_2_0)
        || ExternalizableHelper.isPatchCompatible(in, VersionHelper.VERSION_14_1_1_2206_7))
        {
        // NOTE(review): relies on autoboxing of the long, unlike the
        // Base.makeLong calls above; the stored value is the same.
        mapSnapshot.put("ClearCount", ExternalizableHelper.readLong(in));
        }
    }
/**
 * Resets the cache statistics on the associated Storage, if any.
 */
public void resetStatistics()
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    Storage storage = get_Storage();
    if (storage == null)
        {
        return;
        }
    storage.resetStats();
    }
/**
 * Clears the underlying cache; no-op when this node holds no Storage for it.
 *
 * @throws an exception from checkReadOnly when the MBean is read-only
 */
public void clearCache()
    {
    checkReadOnly("clearCache");

    Storage storage = get_Storage();
    if (storage == null)
        {
        return;
        }

    PartitionedCache service = storage.getService();
    if (service == null)
        {
        return;
        }

    service.ensureCache(storage.getCacheName(), null).clear();
    }
/**
 * Truncates the underlying cache; no-op when this node holds no Storage for it.
 *
 * @throws an exception from checkReadOnly when the MBean is read-only
 */
public void truncateCache()
    {
    checkReadOnly("truncateCache");

    Storage storage = get_Storage();
    if (storage == null)
        {
        return;
        }

    PartitionedCache service = storage.getService();
    if (service == null)
        {
        return;
        }

    service.ensureCache(storage.getCacheName(), null).truncate();
    }
/**
 * Destroys the underlying cache; no-op when this node holds no Storage for it.
 *
 * @throws an exception from checkReadOnly when the MBean is read-only
 */
public void destroyCache()
    {
    checkReadOnly("destroyCache");

    Storage storage = get_Storage();
    if (storage == null)
        {
        return;
        }

    PartitionedCache service = storage.getService();
    if (service == null)
        {
        return;
        }

    service.ensureCache(storage.getCacheName(), null).destroy();
    }
/**
 * Returns the size of the underlying cache, or 0 when this node holds no
 * Storage (or no service) for it.
 */
public int size()
    {
    Storage storage = get_Storage();
    PartitionedCache service = storage == null ? null : storage.getService();
    return service == null
        ? 0
        : service.ensureCache(storage.getCacheName(), null).size();
    }
/**
 * Reports the partition stats in the format specified.
 *
 * @param sFormat specified the format of the data required. Valid values are "json", "csv" or "native".
 *                The "native" format is for use internally by the REST API only.
 *
 * @return a PartitionSize[] for the "native" format, otherwise a String holding
 *         either a JSON array (the default) or CSV lines; an empty JSON array
 *         ("[]") when this node has no storage or service for the cache
 */
public Object reportPartitionStats(String sFormat)
    {
    Storage storage = get_Storage();
    if (storage != null)
        {
        PartitionedCache service = storage.getService();
        if (service != null)
            {
            NamedCache cache = service.ensureCache(storage.getCacheName(), null);
            // aggregate the per-partition sizes across the whole cache
            Set<PartitionSize> setResults = (Set<PartitionSize>) cache.aggregate(AlwaysFilter.INSTANCE(), new PartitionSizeAggregator());
            if ("native".equals(sFormat))
                {
                // Return PartitionSize[] as called from REST API
                return setResults.toArray();
                }
            // default format to "json". Use an AtomicInteger, so we can use in the lambda as final
            final String[] asFormats = new String[]{"{\"partitionId\":%d, \"count\": %d, \"totalSize\": %d, \"maxEntrySize\": %d, \"memberId\": %d}", "%d,%d,%d,%d,%d"};
            final AtomicInteger index = new AtomicInteger(0);
            String sJoin  = ",\n";
            String sFinal = "]";
            StringBuilder sb = new StringBuilder();
            if ("csv".equals(sFormat))
                {
                // CSV: one row per partition, no surrounding brackets
                index.set(1);
                sJoin  = "\n";
                sFinal = "";
                }
            else
                {
                sb.append("[");
                }
            String sResult = setResults.stream().map(v -> String.format(asFormats[index.intValue()], v.getPartitionId(), v.getCount(), v.getTotalSize(), v.getMaxEntrySize(), v.getMemberId()))
                .collect(Collectors.joining(sJoin));
            sb.append(sResult);
            return sb.append(sFinal).toString();
            }
        }
    // BUG FIX: previously returned the malformed literal "[]]"; an empty
    // result must be a valid (empty) JSON array
    return "[]";
    }
// Accessor for the property "_Storage"
/**
 * Setter for property _Storage.<p>
 * The Storage object associated with this model.
 */
public void set_Storage(Storage storage)
    {
    // import java.lang.ref.WeakReference;

    // hold the Storage weakly so this model never keeps it alive
    WeakReference refStorage = new WeakReference(storage);
    set_StorageRef(refStorage);
    }
// Accessor for the property "_StorageRef"
/**
 * Setter for property _StorageRef.<p>
 * The Storage object associated with this model, wrapped in a WeakReference
 * to avoid resource leakage.
 *
 * @param refStorage the weak reference to store; callers go through
 *        set_Storage, which performs the wrapping
 */
protected void set_StorageRef(java.lang.ref.WeakReference refStorage)
    {
    __m__StorageRef = refStorage;
    }
// Accessor for the property "MaxQueryThresholdMillis"
/**
 * Setter for property MaxQueryThresholdMillis.<p>
 * No-op when this node holds no Storage for the cache.
 *
 * @param cMillis the new recording threshold, in milliseconds
 */
public void setMaxQueryThresholdMillis(long cMillis)
    {
    // import Component.Util.Daemon.QueueProcessor.Service.Grid.PartitionedService.PartitionedCache$Storage as Storage;

    checkReadOnly("setMaxQueryThresholdMillis");

    Storage storage = get_Storage();
    if (storage == null)
        {
        return;
        }
    storage.setStatsMaxQueryThresholdMillis(cMillis);
    }
// Declared at the super level
/**
 * Must be supplemented at each specific Model implementation.
 *
 * Serializes the statistics snapshot. The attribute order and the version
 * gates below must exactly mirror readExternal; a mismatch would silently
 * corrupt every subsequent attribute on the reading side.
 */
public void writeExternal(java.io.DataOutput out)
        throws java.io.IOException
    {
    // import com.tangosol.util.ExternalizableHelper;
    super.writeExternal(out);
    ExternalizableHelper.writeStringArray(out, getEventInterceptorInfo());
    ExternalizableHelper.writeLong(out, getEventsDispatched());
    ExternalizableHelper.writeLong(out, getEvictionCount());
    ExternalizableHelper.writeStringArray(out, getIndexInfo());
    ExternalizableHelper.writeLong(out, getIndexTotalUnits());
    ExternalizableHelper.writeLong(out, getInsertCount());
    ExternalizableHelper.writeInt(out, getListenerFilterCount());
    ExternalizableHelper.writeInt(out, getListenerKeyCount());
    ExternalizableHelper.writeLong(out, getListenerRegistrations());
    ExternalizableHelper.writeInt(out, getLocksGranted());
    ExternalizableHelper.writeInt(out, getLocksPending());
    ExternalizableHelper.writeUTF(out, getMaxQueryDescription());
    ExternalizableHelper.writeLong(out, getMaxQueryDurationMillis());
    ExternalizableHelper.writeLong(out, getMaxQueryThresholdMillis());
    ExternalizableHelper.writeLong(out, getNonOptimizedQueryAverageMillis());
    ExternalizableHelper.writeLong(out, getNonOptimizedQueryCount());
    ExternalizableHelper.writeLong(out, getNonOptimizedQueryTotalMillis());
    ExternalizableHelper.writeLong(out, getOptimizedQueryAverageMillis());
    ExternalizableHelper.writeLong(out, getOptimizedQueryCount());
    ExternalizableHelper.writeLong(out, getOptimizedQueryTotalMillis());
    ExternalizableHelper.writeLong(out, getQueryContentionCount());
    ExternalizableHelper.writeLong(out, getRemoveCount());
    ExternalizableHelper.writeStringArray(out, getTriggerInfo());
    // added in 14.1.2.0.0 / 21.06
    if (ExternalizableHelper.isVersionCompatible(out, 21, 6, 0))
        {
        ExternalizableHelper.writeLong(out, getIndexingTotalMillis());
        }
    // added in 14.1.2.0.0 / 22.06.7 / 23.09.1
    if (ExternalizableHelper.isVersionCompatible(out, VersionHelper.VERSION_23_09_1)
        || ExternalizableHelper.isPatchCompatible(out, VersionHelper.VERSION_14_1_2_0)
        || ExternalizableHelper.isPatchCompatible(out, VersionHelper.VERSION_14_1_1_2206_7))
        {
        ExternalizableHelper.writeLong(out, getClearCount());
        }
    }
}
|
google/gapid | 36,197 | gapic/src/main/com/google/gapid/perfetto/views/TraceConfigDialog.java | /*
* Copyright (C) 2019 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gapid.perfetto.views;
import static com.google.gapid.proto.SettingsProto.Perfetto.Vulkan.CpuTiming.CPU_TIMING_COMMAND_BUFFER;
import static com.google.gapid.proto.SettingsProto.Perfetto.Vulkan.CpuTiming.CPU_TIMING_DEVICE;
import static com.google.gapid.proto.SettingsProto.Perfetto.Vulkan.CpuTiming.CPU_TIMING_INSTANCE;
import static com.google.gapid.proto.SettingsProto.Perfetto.Vulkan.CpuTiming.CPU_TIMING_PHYSICAL_DEVICE;
import static com.google.gapid.proto.SettingsProto.Perfetto.Vulkan.CpuTiming.CPU_TIMING_QUEUE;
import static com.google.gapid.proto.SettingsProto.Perfetto.Vulkan.MemoryTracking.MEMORY_TRACKING_DEVICE;
import static com.google.gapid.proto.SettingsProto.Perfetto.Vulkan.MemoryTracking.MEMORY_TRACKING_DRIVER;
import static com.google.gapid.widgets.Widgets.createCheckbox;
import static com.google.gapid.widgets.Widgets.createComposite;
import static com.google.gapid.widgets.Widgets.createLabel;
import static com.google.gapid.widgets.Widgets.createLink;
import static com.google.gapid.widgets.Widgets.createSpinner;
import static com.google.gapid.widgets.Widgets.createTextarea;
import static com.google.gapid.widgets.Widgets.scheduleIfNotDisposed;
import static com.google.gapid.widgets.Widgets.withIndents;
import static com.google.gapid.widgets.Widgets.withLayoutData;
import static com.google.gapid.widgets.Widgets.withMargin;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.logging.Level.WARNING;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.gapid.models.Models;
import com.google.gapid.models.Settings;
import com.google.gapid.proto.SettingsProto;
import com.google.gapid.proto.device.Device;
import com.google.gapid.proto.device.GpuProfiling;
import com.google.gapid.util.Messages;
import com.google.gapid.widgets.DialogBase;
import com.google.gapid.widgets.Theme;
import com.google.gapid.widgets.Widgets;
import com.google.protobuf.ProtocolMessageEnum;
import com.google.protobuf.TextFormat;
import com.google.protobuf.TextFormat.ParseException;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.StackLayout;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Spinner;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Consumer;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import perfetto.protos.PerfettoConfig;
import perfetto.protos.PerfettoConfig.TraceConfig.BufferConfig.FillPolicy;
public class TraceConfigDialog extends DialogBase {
protected static final Logger LOG = Logger.getLogger(TraceConfigDialog.class.getName());
private static final int MAIN_BUFFER_SIZE = 131072;
private static final int PROC_BUFFER_SIZE = 4096;
private static final int PROC_BUFFER = 1;
// Kernel ftrace buffer size per CPU.
private static final int FTRACE_BUFFER_SIZE = 8192;
private static final int PROC_SCAN_PERIOD = 2000;
private static final int FTRACE_DRAIN_PERIOD = 250;
private static final int MAX_IN_MEM_DURATION = 15 * 1000;
private static final int FLUSH_PERIOD = 5000;
private static final int WRITE_PERIOD = 2000;
private static final long MAX_FILE_SIZE = 2l * 1024 * 1024 * 1024;
// These ftrace categories are always enabled to track process creation and ending.
private static final String[] PROCESS_TRACKING_FTRACE = {
"sched/sched_process_free",
"task/task_newtask",
"task/task_rename",
};
// These ftrace categories are used to track CPU slices.
private static final String[] CPU_BASE_FTRACE = {
"sched/sched_switch",
"power/suspend_resume",
};
// These ftrace categories provide CPU frequency data.
private static final String[] CPU_FREQ_FTRACE = {
"power/cpu_frequency",
"power/cpu_idle"
};
// These ftrace categories provide scheduling dependency data.
private static final String[] CPU_CHAIN_FTRACE = {
"sched/sched_wakeup",
"sched/sched_wakeup_new",
"sched/sched_waking",
};
// These ftrace categories provide memory usage data.
private static final String[] MEM_FTRACE = {
"kmem/rss_stat",
};
private static final String[] CPU_SLICES_ATRACE = {
"am", "audio", "gfx", "hal", "input", "pm", "power", "res", "rs", "sm", "video", "view", "wm",
};
private static final String[] GPU_FREQ_FTRACE = {
"power/gpu_frequency",
};
private static final PerfettoConfig.MeminfoCounters[] MEM_COUNTERS = {
PerfettoConfig.MeminfoCounters.MEMINFO_MEM_TOTAL,
PerfettoConfig.MeminfoCounters.MEMINFO_MEM_FREE,
PerfettoConfig.MeminfoCounters.MEMINFO_BUFFERS,
PerfettoConfig.MeminfoCounters.MEMINFO_CACHED,
PerfettoConfig.MeminfoCounters.MEMINFO_SWAP_CACHED,
};
private static final PerfettoConfig.AndroidPowerConfig.BatteryCounters[] BAT_COUNTERS = {
PerfettoConfig.AndroidPowerConfig.BatteryCounters.BATTERY_COUNTER_CAPACITY_PERCENT,
PerfettoConfig.AndroidPowerConfig.BatteryCounters.BATTERY_COUNTER_CHARGE,
PerfettoConfig.AndroidPowerConfig.BatteryCounters.BATTERY_COUNTER_CURRENT,
};
private static final ImmutableMap<ProtocolMessageEnum, String> VK_LABLES =
ImmutableMap.<ProtocolMessageEnum, String> builder()
.put(CPU_TIMING_COMMAND_BUFFER, "VkCommandBuffer")
.put(CPU_TIMING_DEVICE, "VkDevice")
.put(CPU_TIMING_INSTANCE, "VkInstance")
.put(CPU_TIMING_PHYSICAL_DEVICE, "VkPhysicalDevice")
.put(CPU_TIMING_QUEUE, "VkQueue")
.put(MEMORY_TRACKING_DEVICE, "Device")
.put(MEMORY_TRACKING_DRIVER, "Driver")
.build();
private static final Pattern APP_REGEX = Pattern.compile("(?:[^:]*)?:([^/]+)(?:/[^/]+)");
private final Settings settings;
private final Device.PerfettoCapability caps;
private InputArea input;
  /**
   * @param shell the parent SWT shell.
   * @param settings the application settings to read from and persist to.
   * @param theme the UI theme, forwarded to the base dialog.
   * @param caps the target device's Perfetto capabilities, used to decide
   *        which options to show.
   */
  public TraceConfigDialog(
      Shell shell, Settings settings, Theme theme, Device.PerfettoCapability caps) {
    super(shell, theme);
    this.settings = settings;
    this.caps = caps;
  }
public static void showPerfettoConfigDialog(
Shell shell, Models models, Widgets widgets, Device.PerfettoCapability caps) {
new TraceConfigDialog(shell, models.settings, widgets.theme, caps).open();
}
public static String getConfigSummary(Settings settings, Device.PerfettoCapability caps) {
SettingsProto.PerfettoOrBuilder p = settings.perfetto();
if (p.getUseCustom()) {
return "Custom";
}
List<String> enabled = Lists.newArrayList();
if (p.getCpuOrBuilder().getEnabled()) {
enabled.add("CPU");
}
Device.GPUProfiling gpuCaps = caps.getGpuProfiling();
if (gpuCaps.getHasRenderStage() ||
gpuCaps.getGpuCounterDescriptor().getSpecsCount() > 0 ||
gpuCaps.getHasFrameLifecycle()) {
if (p.getGpuOrBuilder().getEnabled()) {
enabled.add("GPU");
}
}
if (p.getMemoryOrBuilder().getEnabled()) {
enabled.add("Memory");
}
if (p.getBatteryOrBuilder().getEnabled()) {
enabled.add("Battery");
}
if (p.getVulkanOrBuilder().getEnabled()) {
Device.VulkanProfilingLayers vkLayers = caps.getVulkanProfileLayers();
SettingsProto.Perfetto.VulkanOrBuilder vk = p.getVulkanOrBuilder();
if ((vk.getCpuTiming() && vkLayers.getCpuTiming()) ||
(vk.getMemoryTracking() && vkLayers.getMemoryTracker())) {
enabled.add("Vulkan");
}
}
return enabled.stream().collect(joining(", "));
}
  /**
   * Assembles the Perfetto trace config proto from the current settings.
   *
   * @param settings the application settings holding the Perfetto options.
   * @param caps the target device's capabilities; gates the GPU and Vulkan
   *        data sources.
   * @param traceTarget the traced app identifier; may be empty. Targets of the
   *        form "action:package/activity" are reduced to the package name.
   * @param duration the requested trace duration in milliseconds; durations
   *        beyond MAX_IN_MEM_DURATION switch the trace to write-into-file mode.
   * @return the assembled trace config builder.
   */
  public static PerfettoConfig.TraceConfig.Builder getConfig(
      Settings settings, Device.PerfettoCapability caps, String traceTarget, int duration) {
    SettingsProto.PerfettoOrBuilder p = settings.perfetto();
    // A user-supplied custom config is used verbatim, except for the duration.
    if (p.getUseCustom()) {
      return p.getCustomConfig().toBuilder().setDurationMs(duration);
    }

    PerfettoConfig.TraceConfig.Builder config = PerfettoConfig.TraceConfig.newBuilder();
    // ftrace is always enabled so process creation/ending is tracked
    // (PROCESS_TRACKING_FTRACE); feature-specific events are added below.
    PerfettoConfig.FtraceConfig.Builder ftrace = config.addDataSourcesBuilder()
        .getConfigBuilder()
        .setName("linux.ftrace")
        .getFtraceConfigBuilder()
        .addAllFtraceEvents(Arrays.asList(PROCESS_TRACKING_FTRACE))
        .setDrainPeriodMs(FTRACE_DRAIN_PERIOD)
        .setBufferSizeKb(FTRACE_BUFFER_SIZE)
        .setCompactSched(PerfettoConfig.FtraceConfig.CompactSchedConfig.newBuilder()
            .setEnabled(true));
    // Record process names at startup into the metadata buffer.
    config.addDataSourcesBuilder()
        .getConfigBuilder()
        .setName("linux.process_stats")
        .setTargetBuffer(PROC_BUFFER)
        .getProcessStatsConfigBuilder()
        .setScanAllProcessesOnStart(true);
    // Periodically record process information into the main buffer.
    config.addDataSourcesBuilder()
        .getConfigBuilder()
        .setName("linux.process_stats")
        .getProcessStatsConfigBuilder()
        .setProcStatsPollMs(PROC_SCAN_PERIOD)
        .setProcStatsCacheTtlMs(10 * PROC_SCAN_PERIOD);

    if (p.getCpuOrBuilder().getEnabled()) {
      ftrace.addAllFtraceEvents(Arrays.asList(CPU_BASE_FTRACE));
      if (p.getCpuOrBuilder().getFrequency()) {
        ftrace.addAllFtraceEvents(Arrays.asList(CPU_FREQ_FTRACE));
      }
      if (p.getCpuOrBuilder().getChain()) {
        ftrace.addAllFtraceEvents(Arrays.asList(CPU_CHAIN_FTRACE));
      }
      if (p.getCpuOrBuilder().getSlices() && caps.getCanSpecifyAtraceApps()) {
        ftrace.addAllAtraceCategories(Arrays.asList(CPU_SLICES_ATRACE));
        if (!traceTarget.isEmpty()) {
          // Pass only the package name to atrace when the target has the
          // "action:package/activity" shape.
          Matcher m = APP_REGEX.matcher(traceTarget);
          ftrace.addAtraceApps(m.matches() ? m.group(1) : traceTarget);
        }
      }
    }

    if (p.getGpuOrBuilder().getEnabled()) {
      ftrace.addAllFtraceEvents(Arrays.asList(GPU_FREQ_FTRACE));
      Device.GPUProfiling gpuCaps = caps.getGpuProfiling();
      SettingsProto.Perfetto.GPUOrBuilder gpu = p.getGpuOrBuilder();
      if (gpuCaps.getHasRenderStage() && gpu.getSlices()) {
        config.addDataSourcesBuilder()
            .getConfigBuilder()
            .setName("gpu.renderstages");
        config.addDataSourcesBuilder()
            .getConfigBuilder()
            .setName("VulkanAPI");
      }
      if (gpuCaps.getGpuCounterDescriptor().getSpecsCount() > 0 &&
          gpu.getCounters() && gpu.getCounterIdsCount() > 0) {
        PerfettoConfig.GpuCounterConfig.Builder counters = config.addDataSourcesBuilder()
            .getConfigBuilder()
            .setName("gpu.counters")
            .getGpuCounterConfigBuilder()
            .setCounterPeriodNs(MILLISECONDS.toNanos(gpu.getCounterRate()));
        counters.addAllCounterIds(gpu.getCounterIdsList());
      }
      if (gpuCaps.getHasFrameLifecycle() && gpu.getSurfaceFlinger()) {
        config.addDataSourcesBuilder()
            .getConfigBuilder()
            .setName("android.surfaceflinger.frame");
      }
    }

    if (p.getMemoryOrBuilder().getEnabled()) {
      ftrace.addAllFtraceEvents(Arrays.asList(MEM_FTRACE));
      config.addDataSourcesBuilder()
          .getConfigBuilder()
          .setName("linux.sys_stats")
          .getSysStatsConfigBuilder()
          .setMeminfoPeriodMs(p.getMemoryOrBuilder().getRate())
          .addAllMeminfoCounters(Arrays.asList(MEM_COUNTERS));
    }

    if (p.getBatteryOrBuilder().getEnabled()) {
      config.addDataSourcesBuilder()
          .getConfigBuilder()
          .setName("android.power")
          .getAndroidPowerConfigBuilder()
          .setBatteryPollMs(p.getBatteryOrBuilder().getRate())
          .addAllBatteryCounters(Arrays.asList(BAT_COUNTERS));
    }

    boolean largeBuffer = false;
    if (p.getVulkanOrBuilder().getEnabled()) {
      Device.VulkanProfilingLayers vkLayers = caps.getVulkanProfileLayers();
      SettingsProto.Perfetto.VulkanOrBuilder vk = p.getVulkanOrBuilder();
      if (vkLayers.getCpuTiming() && vk.getCpuTiming()) {
        // CPU timing is enabled below only with an 8x main buffer.
        largeBuffer = true;
        config.addDataSourcesBuilder()
            .getConfigBuilder()
            .setName("VulkanCPUTiming")
            .setLegacyConfig(vkLabels(vk.getCpuTimingCategoriesList()));
      }
      if (vkLayers.getMemoryTracker() && vk.getMemoryTracking()) {
        config.addDataSourcesBuilder()
            .getConfigBuilder()
            .setName("VulkanMemoryTracker")
            .setLegacyConfig(vkLabels(vk.getMemoryTrackingCategoriesList()));
      }
    }

    // Buffer 0 (default): main buffer.
    config.addBuffers(PerfettoConfig.TraceConfig.BufferConfig.newBuilder()
        .setSizeKb((largeBuffer ? 8 : 1) * MAIN_BUFFER_SIZE)
        .setFillPolicy(FillPolicy.DISCARD));
    // Buffer 1: Initial process metadata.
    config.addBuffers(PerfettoConfig.TraceConfig.BufferConfig.newBuilder()
        .setSizeKb(PROC_BUFFER_SIZE)
        .setFillPolicy(FillPolicy.DISCARD));

    config.setFlushPeriodMs(FLUSH_PERIOD);
    config.setDurationMs(duration);
    // Long traces won't fit in the in-memory buffers; stream them to a file.
    if (duration > MAX_IN_MEM_DURATION) {
      config.setWriteIntoFile(true);
      config.setFileWritePeriodMs(WRITE_PERIOD);
      config.setMaxFileSizeBytes(MAX_FILE_SIZE);
    }
    return config;
  }
private static String vkLabels(List<?> list) {
return list.stream()
.map(VK_LABLES::get)
.filter(Objects::nonNull)
.distinct()
.collect(joining(":"));
}
  @Override
  public String getTitle() {
    // Window title shown by the base dialog.
    return Messages.CAPTURE_TRACE_PERFETTO;
  }
  @Override
  protected Control createDialogArea(Composite parent) {
    Composite area = (Composite)super.createDialogArea(parent);
    // StackLayout shows exactly one of the two input areas at a time.
    Composite container = withLayoutData(createComposite(area, new StackLayout()),
        new GridData(GridData.FILL_BOTH));
    // The areas' "switch" links reference each other, hence the array
    // (captured by the lambdas before both elements are assigned).
    InputArea[] areas = new InputArea[2];
    areas[0] = new BasicInputArea(
        container, settings, theme, caps, () -> switchTo(container, areas[1]));
    areas[1] = new AdvancedInputArea(
        container, () -> switchTo(container, areas[0]), this::setOkButtonEnabled);
    // Start on the advanced editor iff the settings use a custom config.
    input = settings.perfetto().getUseCustom() ? areas[1] : areas[0];
    ((StackLayout)container.getLayout()).topControl = input.asControl();
    // Delay this, so the dialog size is computed only based on the basic dialog.
    scheduleIfNotDisposed(container, () -> input.onSwitchedTo(settings));
    return area;
  }
private void switchTo(Composite container, InputArea newArea) {
input = newArea;
((StackLayout)container.getLayout()).topControl = input.asControl();
container.requestLayout();
input.onSwitchedTo(settings);
setOkButtonEnabled(true);
}
private void setOkButtonEnabled(boolean enabled) {
Button button = getButton(IDialogConstants.OK_ID);
if (button != null) {
button.setEnabled(enabled);
}
}
  @Override
  protected void okPressed() {
    // Persist the visible area's state into the settings before closing.
    input.update(settings);
    super.okPressed();
  }
  /**
   * One page of the dialog's stack: either the basic checkbox form or the
   * advanced raw-proto editor. Implementations are SWT Composites.
   */
  private static interface InputArea {
    /** Invoked when this area becomes the visible one; default is a no-op. */
    public default void onSwitchedTo(@SuppressWarnings("unused") Settings settings) {
      // Do nothing.
    }
    /** Persists the user's input back into the settings. */
    public void update(Settings settings);
    /** The SWT control backing this area. */
    public default Control asControl() {
      return (Control)this;
    }
  }
private static class BasicInputArea extends Composite implements InputArea {
    // Horizontal indent (px) for the sub-option groups under each checkbox.
    private static final int GROUP_INDENT = 20;

    // CPU section controls.
    private final Button cpu;
    private final Button cpuFreq;
    private final Button cpuChain;
    private final Button cpuSlices;
    // GPU section controls; null when the device lacks the capability.
    private final Button gpu;
    private final Button gpuSlices;
    private final Button gpuCounters;
    private final Label[] gpuCountersLabels;
    private final Button gpuCountersSelect;
    private final Spinner gpuCountersRate;
    private final Button gpuFrame;
    // Memory section controls.
    private final Button mem;
    private final Label[] memLabels;
    private final Spinner memRate;
    // Battery section controls.
    private final Button bat;
    private final Label[] batLabels;
    private final Spinner batRate;
    // Vulkan section controls; null when the profiling layers are unavailable.
    private final Button vulkan;
    private final Button vulkanCPUTiming;
    private final Button vulkanCPUTimingCommandBuffer;
    private final Button vulkanCPUTimingDevice;
    private final Button vulkanCPUTimingInstance;
    private final Button vulkanCPUTimingPhysicalDevice;
    private final Button vulkanCPUTimingQueue;
    private final Button vulkanMemoryTracking;
    private final Button vulkanMemoryTrackingDevice;
    private final Button vulkanMemoryTrackingDriver;
    /**
     * Builds the checkbox-driven config form. Controls for capabilities the
     * device does not report are not created and stay null; update() and the
     * update*() helpers null-check accordingly.
     *
     * @param toAdvanced invoked when the user switches to the advanced editor.
     */
    public BasicInputArea(Composite parent, Settings settings, Theme theme,
        Device.PerfettoCapability caps, Runnable toAdvanced) {
      super(parent, SWT.NONE);
      setLayout(new GridLayout(1, false));
      SettingsProto.Perfetto.CPUOrBuilder sCpu = settings.perfetto().getCpuOrBuilder();
      SettingsProto.Perfetto.GPUOrBuilder sGpu = settings.perfetto().getGpuOrBuilder();
      SettingsProto.Perfetto.MemoryOrBuilder sMem = settings.perfetto().getMemoryOrBuilder();
      SettingsProto.Perfetto.BatteryOrBuilder sBatt = settings.perfetto().getBatteryOrBuilder();
      SettingsProto.Perfetto.VulkanOrBuilder sVk = settings.perfetto().getVulkanOrBuilder();
      // CPU section.
      cpu = createCheckbox(this, "CPU", sCpu.getEnabled(), e -> updateCpu());
      Composite cpuGroup = withLayoutData(
          createComposite(this, withMargin(new GridLayout(1, false), 5, 0)),
          withIndents(new GridData(), GROUP_INDENT, 0));
      cpuFreq = createCheckbox(cpuGroup, "Frequency and idle states", sCpu.getFrequency());
      cpuChain = createCheckbox(cpuGroup, "Scheduling chains / latency", sCpu.getChain());
      cpuSlices = createCheckbox(cpuGroup, "Thread slices", sCpu.getSlices());
      addSeparator();
      // GPU section: only built when the device reports some GPU profiling capability.
      Device.GPUProfiling gpuCaps = caps.getGpuProfiling();
      if (gpuCaps.getHasRenderStage() ||
          gpuCaps.getGpuCounterDescriptor().getSpecsCount() > 0 ||
          gpuCaps.getHasFrameLifecycle()) {
        gpu = createCheckbox(this, "GPU", sGpu.getEnabled(), e -> updateGpu());
        Composite gpuGroup = withLayoutData(
            createComposite(this, withMargin(new GridLayout(1, false), 5, 0)),
            withIndents(new GridData(), GROUP_INDENT, 0));
        if (gpuCaps.getHasRenderStage()) {
          gpuSlices = createCheckbox(gpuGroup, "Renderstage slices", sGpu.getSlices());
        } else {
          gpuSlices = null;
        }
        if (gpuCaps.getGpuCounterDescriptor().getSpecsCount() > 0) {
          gpuCounters = createCheckbox(
              gpuGroup, "Counters", sGpu.getCounters(), e -> updateGpu());
          Composite counterGroup = withLayoutData(
              createComposite(gpuGroup, withMargin(new GridLayout(3, false), 5, 0)),
              withIndents(new GridData(), GROUP_INDENT, 0));
          gpuCountersLabels = new Label[3];
          gpuCountersLabels[1] = createLabel(counterGroup, "Poll Rate:");
          gpuCountersRate = createSpinner(counterGroup, sGpu.getCounterRate(), 1, 1000);
          gpuCountersLabels[2] = createLabel(counterGroup, "ms");
          // Count how many of the device's counters are currently selected.
          long count = caps.getGpuProfiling().getGpuCounterDescriptor().getSpecsList().stream()
              .filter(c -> sGpu.getCounterIdsList().contains(c.getCounterId())).count();
          gpuCountersLabels[0] = createLabel(counterGroup, count + " selected");
          gpuCountersSelect = Widgets.createButton(counterGroup, "Select", e -> {
            // The selection dialog writes straight back into the settings.
            List<Integer> currentIds = settings.perfetto().getGpuOrBuilder().getCounterIdsList();
            GpuCountersDialog dialog = new GpuCountersDialog(getShell(), theme, caps, currentIds);
            if (dialog.open() == Window.OK) {
              List<Integer> newIds = dialog.getSelectedIds();
              settings.writePerfetto().getGpuBuilder()
                  .clearCounterIds()
                  .addAllCounterIds(newIds)
                  .setCounters(!newIds.isEmpty());
              gpuCountersLabels[0].setText(newIds.size() + " selected");
              gpuCountersLabels[0].requestLayout();
              updateGpu();
            }
          });
        } else {
          gpuCounters = null;
          gpuCountersLabels = null;
          gpuCountersRate = null;
          gpuCountersSelect = null;
        }
        if (gpuCaps.getHasFrameLifecycle()) {
          gpuFrame = createCheckbox(
              gpuGroup, "Frame Lifecycle", sGpu.getSurfaceFlinger(), e -> updateGpu());
        } else {
          gpuFrame = null;
        }
        addSeparator();
      } else {
        gpu = null;
        gpuSlices = null;
        gpuCounters = null;
        gpuCountersLabels = null;
        gpuCountersRate = null;
        gpuCountersSelect = null;
        gpuFrame = null;
      }
      // Memory section.
      mem = createCheckbox(this, "Memory", sMem.getEnabled(), e -> updateMem());
      memLabels = new Label[2];
      Composite memGroup = withLayoutData(
          createComposite(this, withMargin(new GridLayout(3, false), 5, 0)),
          withIndents(new GridData(), GROUP_INDENT, 0));
      memLabels[0] = createLabel(memGroup, "Poll Rate:");
      memRate = createSpinner(memGroup, sMem.getRate(), 1, 1000);
      memLabels[1] = createLabel(memGroup, "ms");
      addSeparator();
      // Battery section.
      bat = createCheckbox(this, "Battery", sBatt.getEnabled(), e -> updateBat());
      batLabels = new Label[2];
      Composite batGroup = withLayoutData(
          createComposite(this, withMargin(new GridLayout(3, false), 5, 0)),
          withIndents(new GridData(), GROUP_INDENT, 0));
      batLabels[0] = createLabel(batGroup, "Poll Rate:");
      batRate = createSpinner(batGroup, sBatt.getRate(), 250, 60000);
      batLabels[1] = createLabel(batGroup, "ms");
      // Vulkan section: only built when one of the profiling layers is present.
      Device.VulkanProfilingLayers vkLayers = caps.getVulkanProfileLayers();
      if (vkLayers.getCpuTiming() || vkLayers.getMemoryTracker()) {
        addSeparator();
        vulkan = createCheckbox(this, "Vulkan", sVk.getEnabled(), e -> updateVulkan());
        Composite vkGroup = withLayoutData(
            createComposite(this, new GridLayout(1, false)),
            withIndents(new GridData(), GROUP_INDENT, 0));
        if (vkLayers.getCpuTiming()) {
          vulkanCPUTiming = createCheckbox(
              vkGroup, "CPU Timing", sVk.getCpuTiming(), e -> updateVulkan());
          Composite cpuTimingGroup = withLayoutData(
              createComposite(vkGroup, withMargin(new GridLayout(1, false), 5, 0)),
              withIndents(new GridData(), GROUP_INDENT, 0));
          vulkanCPUTimingInstance =
              createCheckbox(cpuTimingGroup, "Instance", hasCategory(sVk, CPU_TIMING_INSTANCE));
          vulkanCPUTimingPhysicalDevice = createCheckbox(
              cpuTimingGroup, "Physical Device", hasCategory(sVk, CPU_TIMING_PHYSICAL_DEVICE));
          vulkanCPUTimingDevice =
              createCheckbox(cpuTimingGroup, "Device", hasCategory(sVk, CPU_TIMING_DEVICE));
          vulkanCPUTimingQueue =
              createCheckbox(cpuTimingGroup, "Queue", hasCategory(sVk, CPU_TIMING_QUEUE));
          vulkanCPUTimingCommandBuffer = createCheckbox(
              cpuTimingGroup, "CommandBuffer", hasCategory(sVk, CPU_TIMING_COMMAND_BUFFER));
        } else {
          vulkanCPUTiming = null;
          vulkanCPUTimingInstance = null;
          vulkanCPUTimingPhysicalDevice = null;
          vulkanCPUTimingDevice = null;
          vulkanCPUTimingQueue = null;
          vulkanCPUTimingCommandBuffer = null;
        }
        if (caps.getVulkanProfileLayers().getMemoryTracker()) {
          vulkanMemoryTracking = createCheckbox(
              vkGroup, "Memory Tracking", sVk.getMemoryTracking(), e -> updateVulkan());
          Composite memoryTrackingGroup = withLayoutData(
              createComposite(vkGroup, withMargin(new GridLayout(1, false), 5, 0)),
              withIndents(new GridData(), GROUP_INDENT, 0));
          vulkanMemoryTrackingDevice = createCheckbox(
              memoryTrackingGroup, "Device", hasCategory(sVk, MEMORY_TRACKING_DEVICE));
          vulkanMemoryTrackingDriver = createCheckbox(
              memoryTrackingGroup, "Driver", hasCategory(sVk, MEMORY_TRACKING_DRIVER));
        } else {
          vulkanMemoryTracking = null;
          vulkanMemoryTrackingDevice = null;
          vulkanMemoryTrackingDriver = null;
        }
      } else {
        vulkan = null;
        vulkanCPUTiming = null;
        vulkanCPUTimingInstance = null;
        vulkanCPUTimingPhysicalDevice = null;
        vulkanCPUTimingDevice = null;
        vulkanCPUTimingQueue = null;
        vulkanCPUTimingCommandBuffer = null;
        vulkanMemoryTracking = null;
        vulkanMemoryTrackingDevice = null;
        vulkanMemoryTrackingDriver = null;
      }
      withLayoutData(createLink(this, "<a>Switch to advanced mode</a>", e -> {
        // Remember the input thus far and turn it into a proto to be modified by the user.
        update(settings);
        settings.writePerfetto().setCustomConfig(
            // Use a config that writes to file for custom by default.
            getConfig(settings, caps, "", MAX_IN_MEM_DURATION + 1)
                .clearDurationMs());
        toAdvanced.run();
      }), new GridData(SWT.END, SWT.BEGINNING, false, false));
      // Initial enable/disable state of each sub-option group.
      updateCpu();
      updateGpu();
      updateMem();
      updateBat();
      updateVulkan();
    }
private static boolean hasCategory(
SettingsProto.Perfetto.VulkanOrBuilder vk, SettingsProto.Perfetto.Vulkan.CpuTiming cat) {
return vk.getCpuTimingCategoriesList().contains(cat);
}
private static boolean hasCategory(SettingsProto.Perfetto.VulkanOrBuilder vk,
SettingsProto.Perfetto.Vulkan.MemoryTracking cat) {
return vk.getMemoryTrackingCategoriesList().contains(cat);
}
  /**
   * Writes the current widget state into the given settings' Perfetto proto and marks the
   * config as non-custom. Widgets that were not created for this device's capabilities are
   * null and are skipped, leaving the corresponding settings untouched.
   */
  @Override
  public void update(Settings settings) {
    SettingsProto.Perfetto.CPU.Builder sCpu = settings.writePerfetto().getCpuBuilder();
    SettingsProto.Perfetto.GPU.Builder sGpu = settings.writePerfetto().getGpuBuilder();
    SettingsProto.Perfetto.Memory.Builder sMem = settings.writePerfetto().getMemoryBuilder();
    SettingsProto.Perfetto.Battery.Builder sBatt = settings.writePerfetto().getBatteryBuilder();
    SettingsProto.Perfetto.Vulkan.Builder sVk = settings.writePerfetto().getVulkanBuilder();
    // This is the basic input area, so the custom (advanced) config is not in use.
    settings.writePerfetto().setUseCustom(false);
    sCpu.setEnabled(cpu.getSelection());
    sCpu.setChain(cpuChain.getSelection());
    sCpu.setFrequency(cpuFreq.getSelection());
    sCpu.setSlices(cpuSlices.getSelection());
    if (gpu != null) {
      sGpu.setEnabled(gpu.getSelection());
    }
    if (gpuSlices != null) {
      sGpu.setSlices(gpuSlices.getSelection());
    }
    if (gpuCounters != null) {
      sGpu.setCounters(gpuCounters.getSelection());
      sGpu.setCounterRate(gpuCountersRate.getSelection());
    }
    if (gpuFrame != null) {
      sGpu.setSurfaceFlinger(gpuFrame.getSelection());
    }
    sMem.setEnabled(mem.getSelection());
    sMem.setRate(memRate.getSelection());
    sBatt.setEnabled(bat.getSelection());
    sBatt.setRate(batRate.getSelection());
    if (vulkan != null) {
      sVk.setEnabled(vulkan.getSelection());
    }
    if (vulkanCPUTiming != null) {
      sVk.setCpuTiming(vulkanCPUTiming.getSelection());
      // Rebuild the category list from scratch so unchecked boxes are removed.
      sVk.clearCpuTimingCategories();
      addCategory(vulkanCPUTimingCommandBuffer, sVk, CPU_TIMING_COMMAND_BUFFER);
      addCategory(vulkanCPUTimingDevice, sVk, CPU_TIMING_DEVICE);
      addCategory(vulkanCPUTimingPhysicalDevice, sVk, CPU_TIMING_PHYSICAL_DEVICE);
      addCategory(vulkanCPUTimingInstance, sVk, CPU_TIMING_INSTANCE);
      addCategory(vulkanCPUTimingQueue, sVk, CPU_TIMING_QUEUE);
    }
    if (vulkanMemoryTracking != null) {
      sVk.setMemoryTracking(vulkanMemoryTracking.getSelection());
      // Rebuild the category list from scratch so unchecked boxes are removed.
      sVk.clearMemoryTrackingCategories();
      addCategory(vulkanMemoryTrackingDevice, sVk, MEMORY_TRACKING_DEVICE);
      addCategory(vulkanMemoryTrackingDriver, sVk, MEMORY_TRACKING_DRIVER);
    }
  }
private static void addCategory(Button checkbox, SettingsProto.Perfetto.Vulkan.Builder vk,
SettingsProto.Perfetto.Vulkan.CpuTiming cat) {
if (checkbox.getSelection()) {
vk.addCpuTimingCategories(cat);
}
}
private static void addCategory(Button checkbox, SettingsProto.Perfetto.Vulkan.Builder vk,
SettingsProto.Perfetto.Vulkan.MemoryTracking cat) {
if (checkbox.getSelection()) {
vk.addMemoryTrackingCategories(cat);
}
}
private void addSeparator() {
withLayoutData(new Label(this, SWT.SEPARATOR | SWT.HORIZONTAL),
new GridData(GridData.FILL_HORIZONTAL));
}
private void updateCpu() {
boolean enabled = cpu.getSelection();
cpuFreq.setEnabled(enabled);
cpuChain.setEnabled(enabled);
cpuSlices.setEnabled(enabled);
}
private void updateGpu() {
if (gpu == null) {
return;
}
boolean enabled = gpu.getSelection();
if (gpuSlices != null) {
gpuSlices.setEnabled(enabled);
}
if (gpuCounters != null) {
gpuCounters.setEnabled(enabled);
boolean countersEnabled = enabled && gpuCounters.getSelection();
gpuCountersRate.setEnabled(countersEnabled);
gpuCountersSelect.setEnabled(countersEnabled);
for (Label label : gpuCountersLabels) {
label.setEnabled(countersEnabled);
}
}
if (gpuFrame != null) {
gpuFrame.setEnabled(enabled);
}
}
private void updateVulkan() {
if (vulkan == null) {
return;
}
boolean vkEnabled = vulkan.getSelection();
if (vulkanCPUTiming != null) {
vulkanCPUTiming.setEnabled(vkEnabled);
boolean enabled = vkEnabled && vulkanCPUTiming.getSelection();
vulkanCPUTimingInstance.setEnabled(enabled);
vulkanCPUTimingPhysicalDevice.setEnabled(enabled);
vulkanCPUTimingDevice.setEnabled(enabled);
vulkanCPUTimingQueue.setEnabled(enabled);
vulkanCPUTimingCommandBuffer.setEnabled(enabled);
}
if (vulkanMemoryTracking != null) {
vulkanMemoryTracking.setEnabled(vkEnabled);
boolean enabled = vkEnabled && vulkanMemoryTracking.getSelection();
vulkanMemoryTrackingDevice.setEnabled(enabled);
vulkanMemoryTrackingDriver.setEnabled(enabled);
}
}
private void updateMem() {
boolean enabled = mem.getSelection();
memRate.setEnabled(enabled);
for (Label label : memLabels) {
label.setEnabled(enabled);
}
}
private void updateBat() {
boolean enabled = bat.getSelection();
batRate.setEnabled(enabled);
for (Label label : batLabels) {
label.setEnabled(enabled);
}
}
  /**
   * Modal dialog listing the GPU counters reported by the device's perfetto capabilities,
   * letting the user check the ones to sample. The resulting counter ids are captured on
   * OK and exposed via {@link #getSelectedIds()}.
   */
  private static class GpuCountersDialog extends DialogBase {
    private final Device.PerfettoCapability caps;
    // Counter ids that were selected when the dialog was opened; used to pre-check rows.
    private final Set<Integer> currentIds;
    private Table table;
    // The user's selection; only populated after OK was pressed.
    private List<Integer> selectedIds;
    public GpuCountersDialog(
        Shell shell, Theme theme, Device.PerfettoCapability caps, List<Integer> currentIds) {
      super(shell, theme);
      this.caps = caps;
      this.currentIds = Sets.newHashSet(currentIds);
    }
    /** Returns the counter ids checked by the user, or null if OK was never pressed. */
    public List<Integer> getSelectedIds() {
      return selectedIds;
    }
    @Override
    public String getTitle() {
      return Messages.CAPTURE_TRACE_PERFETTO;
    }
    @Override
    protected Control createDialogArea(Composite parent) {
      Composite area = (Composite)super.createDialogArea(parent);
      // Checkbox table with one row per counter spec reported by the device.
      table = withLayoutData(new Table(area, SWT.CHECK), new GridData(GridData.FILL_BOTH));
      table.setHeaderVisible(true);
      table.setLinesVisible(true);
      new TableColumn(table, SWT.NONE).setText("Name");
      new TableColumn(table, SWT.NONE).setText("Description");
      for (GpuProfiling.GpuCounterDescriptor.GpuCounterSpec counter :
          caps.getGpuProfiling().getGpuCounterDescriptor().getSpecsList()) {
        TableItem item = new TableItem(table, SWT.NONE);
        item.setText(new String[] { counter.getName(), counter.getDescription() });
        // Keep the spec on the item so okPressed() can read the counter id back.
        item.setData(counter);
        if (currentIds.contains(counter.getCounterId())) {
          item.setChecked(true);
        }
      }
      table.getColumn(0).pack();
      table.getColumn(1).pack();
      // Convenience link to check or uncheck all rows at once.
      createLink(area, "Select <a>none</a> | <a>all</a>", e -> {
        boolean checked = "all".equals(e.text);
        for (TableItem item : table.getItems()) {
          item.setChecked(checked);
        }
      });
      return area;
    }
    @Override
    protected Point getInitialSize() {
      return new Point(convertHorizontalDLUsToPixels(450), convertVerticalDLUsToPixels(300));
    }
    @Override
    protected void okPressed() {
      // Collect the counter ids of all checked rows before the dialog closes.
      selectedIds = Arrays.stream(table.getItems())
          .filter(item -> item.getChecked())
          .map(item -> (GpuProfiling.GpuCounterDescriptor.GpuCounterSpec)item.getData())
          .mapToInt(GpuProfiling.GpuCounterDescriptor.GpuCounterSpec::getCounterId)
          .boxed()
          .collect(toList());
      super.okPressed();
    }
  }
}
/**
 * Advanced trace input area: a free-form text area in which the user edits the perfetto
 * {@code TraceConfig} proto directly in text format. The input is validated on every
 * modification; parse errors are shown inline and disable the OK button.
 */
private static class AdvancedInputArea extends Composite implements InputArea {
  private final Text input;
  public AdvancedInputArea(Composite parent, Runnable toBasic, Consumer<Boolean> okEnabled) {
    super(parent, SWT.NONE);
    setLayout(new GridLayout(1, false));
    input = withLayoutData(createTextarea(this, ""),
        new GridData(SWT.FILL, SWT.FILL, true, true));
    withLayoutData(createLink(
        this, "<a>Reset and switch back to basic</a>", e -> toBasic.run()),
        new GridData(SWT.END, SWT.BEGINNING, false, false));
    // Inline error label, hidden until the config fails to parse.
    Label error = Widgets.createLabel(this, "");
    error.setVisible(false);
    error.setForeground(getDisplay().getSystemColor(SWT.COLOR_DARK_RED));
    // Validate on every keystroke: parse into a throw-away builder and toggle OK.
    input.addListener(SWT.Modify, ev -> {
      try {
        TextFormat.merge(input.getText(), PerfettoConfig.TraceConfig.newBuilder());
        error.setVisible(false);
        error.setText("");
        okEnabled.accept(true);
      } catch (ParseException e) {
        error.setVisible(true);
        error.setText("Parse Error: " + e.getMessage());
        okEnabled.accept(false);
      }
      error.requestLayout();
    });
  }
  @Override
  public void onSwitchedTo(Settings settings) {
    // Show the stored custom config as editable text.
    input.setText(TextFormat.printToString(settings.perfetto().getCustomConfig()));
  }
  @Override
  public void update(Settings settings) {
    try {
      // Replace the stored custom config with the (already validated) text.
      TextFormat.merge(input.getText(), settings.writePerfetto()
          .getCustomConfigBuilder()
          .clear());
      settings.writePerfetto().setUseCustom(true);
    } catch (ParseException e) {
      // This shouldn't happen as we disable the OK button.
      LOG.log(WARNING, "Unexpected proto parse exception", e);
    }
  }
}
}
|
apache/jackrabbit-oak | 36,248 | oak-auth-ldap/src/main/java/org/apache/jackrabbit/oak/security/authentication/ldap/impl/LdapIdentityProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.authentication.ldap.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import javax.jcr.Credentials;
import javax.jcr.SimpleCredentials;
import javax.security.auth.login.LoginException;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.cursor.CursorException;
import org.apache.directory.api.ldap.model.cursor.SearchCursor;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.exception.LdapAuthenticationException;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.exception.LdapInvalidAttributeValueException;
import org.apache.directory.api.ldap.model.message.Response;
import org.apache.directory.api.ldap.model.message.ResultCodeEnum;
import org.apache.directory.api.ldap.model.message.SearchRequest;
import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
import org.apache.directory.api.ldap.model.message.SearchResultDone;
import org.apache.directory.api.ldap.model.message.SearchResultEntry;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.message.controls.PagedResults;
import org.apache.directory.api.ldap.model.message.controls.PagedResultsImpl;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.name.Rdn;
import org.apache.directory.ldap.client.api.AbstractPoolableLdapConnectionFactory;
import org.apache.directory.ldap.client.api.DefaultLdapConnectionValidator;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapConnectionPool;
import org.apache.directory.ldap.client.api.LookupLdapConnectionValidator;
import org.apache.directory.ldap.client.api.NoVerificationTrustManager;
import org.apache.directory.ldap.client.api.ValidatingPoolableLdapConnectionFactory;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.ConfigurationPolicy;
import org.apache.jackrabbit.commons.iterator.AbstractLazyIterator;
import org.apache.jackrabbit.oak.commons.DebugTimer;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalGroup;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentity;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityException;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityRef;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalUser;
import org.apache.jackrabbit.oak.spi.security.authentication.external.PrincipalNameResolver;
import org.apache.jackrabbit.util.Text;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.osgi.service.metatype.annotations.Designate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@code LdapIdentityProvider} implements an external identity provider that reads users and groups from an ldap
* source.
*
* Please refer to {@link LdapProviderConfig} for configuration options.
*/
@Component(
service = { ExternalIdentityProvider.class, PrincipalNameResolver.class },
configurationPolicy = ConfigurationPolicy.REQUIRE)
@Designate(
ocd = LdapProviderConfig.Configuration.class,
factory = true )
public class LdapIdentityProvider implements ExternalIdentityProvider, PrincipalNameResolver {
    /**
     * default logger
     */
    private static final Logger log = LoggerFactory.getLogger(LdapIdentityProvider.class);

    // Debug-timer marker names used throughout this class.
    private static final String MARKER_CONNECT = "connect";
    private static final String MARKER_LOOKUP = "lookup";

    /**
     * internal configuration; set either via the constructor or via OSGi activation
     */
    private LdapProviderConfig config;

    /**
     * the connection pool with connections authenticated with the bind DN
     */
    private LdapConnectionPool adminPool;

    /**
     * admin connection factory; also used directly when admin pooling is disabled
     */
    private AbstractPoolableLdapConnectionFactory adminConnectionFactory;

    /**
     * the connection pool with unbound connections (used for user authentication binds)
     */
    private UnboundLdapConnectionPool userPool;

    /**
     * user connection factory; also used directly when user pooling is disabled
     */
    private PoolableUnboundConnectionFactory userConnectionFactory;
    /**
     * Default constructor for OSGi; configuration is supplied later via {@code activate()}.
     */
    @SuppressWarnings("UnusedDeclaration")
    public LdapIdentityProvider() {
    }
    /**
     * Constructor for non-OSGi cases. Initializes the connection pools immediately.
     * @param config the configuration
     */
    public LdapIdentityProvider(@NotNull LdapProviderConfig config) {
        this.config = config;
        init();
    }
//----------------------------------------------------< SCR integration >---
    /**
     * OSGi activation: builds the provider configuration from the component properties
     * and initializes the connection pools.
     */
    @SuppressWarnings("UnusedDeclaration")
    @Activate
    private void activate(Map<String, Object> properties) {
        ConfigurationParameters cfg = ConfigurationParameters.of(properties);
        config = LdapProviderConfig.of(cfg);
        init();
    }
    /**
     * OSGi deactivation: releases the connection pools.
     */
    @SuppressWarnings("UnusedDeclaration")
    @Deactivate
    private void deactivate() {
        close();
    }
/**
* Closes this provider and releases the internal pool. This should be called by Non-OSGi users of this provider.
*/
public void close() {
if (adminPool != null) {
try {
adminPool.close();
} catch (Exception e) {
log.warn("Error while closing LDAP connection pool", e);
}
adminPool = null;
}
if (userPool != null) {
try {
userPool.close();
} catch (Exception e) {
log.warn("Error while closing LDAP connection pool", e);
}
userPool = null;
}
}
//----------------------------------------------< PrincipalNameResolver >---
@NotNull
@Override
public String fromExternalIdentityRef(@NotNull ExternalIdentityRef externalIdentityRef) throws ExternalIdentityException {
if (!isMyRef(externalIdentityRef)) {
throw new ExternalIdentityException("Foreign IDP " + externalIdentityRef.getString());
}
return externalIdentityRef.getId();
}
//-------------------------------------------< ExternalIdentityProvider >---
    /**
     * Returns the configured name of this identity provider.
     */
    @NotNull
    @Override
    public String getName() {
        return config.getName();
    }
@Override
public ExternalIdentity getIdentity(@NotNull ExternalIdentityRef ref) throws ExternalIdentityException {
if (!isMyRef(ref)) {
return null;
}
LdapConnection connection = connect();
try {
Entry entry;
String id = ref.getId();
boolean useUidForExtId = config.getUseUidForExtId();
String userIdAttr = config.getUserConfig().getIdAttribute();
String groupIdAttr = config.getGroupConfig().getIdAttribute();
String[] ca = config.getCustomAttributes();
if (useUidForExtId) {
entry = getEntry(connection, config.getUserConfig(), id, config.getCustomAttributes());
if (entry == null) {
entry = getEntry(connection, config.getGroupConfig(), id, config.getCustomAttributes());
}
} else {
if (ca.length == 0) {
entry = connection.lookup(id, SchemaConstants.ALL_USER_ATTRIBUTES);
}
else {
List<String> attributes = new ArrayList<>(Arrays.asList(ca));
attributes.add("objectClass");
attributes.add(userIdAttr);
attributes.add(groupIdAttr);
String[] attributeArray = new String[attributes.size()];
attributes.toArray(attributeArray);
entry = connection.lookup(id, attributeArray);
}
}
if (entry == null) {
return null;
} else if (entry.hasObjectClass(config.getUserConfig().getObjectClasses())) {
return createUser(entry, null);
} else if (entry.hasObjectClass(config.getGroupConfig().getObjectClasses())) {
return createGroup(entry, null);
} else {
log.warn("referenced identity is neither user or group: {}", ref.getString());
return null;
}
} catch (LdapException | CursorException e) {
throw lookupFailedException(e, null);
} finally {
disconnect(connection);
}
}
@Override
public ExternalUser getUser(@NotNull String userId) throws ExternalIdentityException {
DebugTimer timer = new DebugTimer();
LdapConnection connection = connect();
timer.mark(MARKER_CONNECT);
try {
Entry entry = getEntry(connection, config.getUserConfig(), userId, config.getCustomAttributes());
timer.mark(MARKER_LOOKUP);
log.debug("getUser({}) {}", userId, timer);
if (entry != null) {
return createUser(entry, userId);
} else {
return null;
}
} catch (LdapException | CursorException e) {
throw lookupFailedException(e, timer);
} finally {
disconnect(connection);
}
}
@Override
public ExternalGroup getGroup(@NotNull String name) throws ExternalIdentityException {
DebugTimer timer = new DebugTimer();
LdapConnection connection = connect();
timer.mark(MARKER_CONNECT);
try {
Entry entry = getEntry(connection, config.getGroupConfig(), name, config.getCustomAttributes());
timer.mark(MARKER_LOOKUP);
log.debug("getGroup({}) {}", name, timer);
if (entry != null) {
return createGroup(entry, name);
} else {
return null;
}
} catch (LdapException | CursorException e) {
throw lookupFailedException(e, timer);
} finally {
disconnect(connection);
}
}
    /**
     * Returns a lazy iterator over all users matching the configured user object classes
     * and extra filter. Entries whose attributes cannot be converted are logged and
     * skipped rather than aborting the iteration.
     */
    @NotNull
    @Override
    public Iterator<ExternalUser> listUsers() throws ExternalIdentityException {
        try {
            final Iterator<Entry> iter = getEntryIterator(config.getUserConfig());
            return new AbstractLazyIterator<ExternalUser>() {
                @Override
                protected ExternalUser getNext() {
                    while (iter.hasNext()) {
                        try {
                            return createUser(iter.next(), null);
                        } catch (LdapInvalidAttributeValueException e) {
                            // skip broken entries but keep iterating
                            log.warn("Error while creating external user object", e);
                        }
                    }
                    return null;
                }
            };
        } catch (LdapException | CursorException e) {
            throw lookupFailedException(e, null);
        }
    }
    /**
     * Returns a lazy iterator over all groups matching the configured group object classes
     * and extra filter. Entries whose attributes cannot be converted are logged and
     * skipped rather than aborting the iteration.
     */
    @NotNull
    @Override
    public Iterator<ExternalGroup> listGroups() throws ExternalIdentityException {
        try {
            final Iterator<Entry> iter = getEntryIterator(config.getGroupConfig());
            return new AbstractLazyIterator<ExternalGroup>() {
                @Override
                protected ExternalGroup getNext() {
                    while (iter.hasNext()) {
                        try {
                            return createGroup(iter.next(), null);
                        } catch (LdapInvalidAttributeValueException e) {
                            // skip broken entries but keep iterating
                            log.warn("Error while creating external group object", e);
                        }
                    }
                    return null;
                }
            };
        } catch (LdapException | CursorException e) {
            throw lookupFailedException(e, null);
        }
    }
@Override
public ExternalUser authenticate(@NotNull Credentials credentials) throws ExternalIdentityException, LoginException {
if (!(credentials instanceof SimpleCredentials)) {
log.debug("LDAP IDP can only authenticate SimpleCredentials.");
return null;
}
final SimpleCredentials creds = (SimpleCredentials) credentials;
final LdapUser user = (LdapUser)getUser(creds.getUserID());
if (user != null) {
// OAK-2078: check for non-empty passwords to avoid anonymous bind on weakly configured servers
// see http://tools.ietf.org/html/rfc4513#section-5.1.1 for details.
if (creds.getPassword().length == 0) {
throw new LoginException("Refusing to authenticate against LDAP server: Empty passwords not allowed.");
}
// authenticate
LdapConnection connection = null;
try {
DebugTimer timer = new DebugTimer();
connection = createUserConnection();
timer.mark(MARKER_CONNECT);
connection.bind(user.getEntry().getDn(), new String(creds.getPassword()));
timer.mark("bind");
log.debug("authenticate({}) {}", user.getId(), timer);
} catch (LdapAuthenticationException e) {
throw new LoginException("Unable to authenticate against LDAP server: " + e.getMessage());
} catch (Exception e) {
throw error(e, "Error while binding user credentials");
} finally {
disconnectUserConnection(connection);
}
}
return user;
}
@NotNull
private LdapConnection createUserConnection() throws Exception {
if (userPool == null) {
return userConnectionFactory.create();
} else {
return userPool.getConnection();
}
}
private void disconnectUserConnection(@Nullable LdapConnection connection) {
if (connection != null) {
try {
if (userPool == null) {
userConnectionFactory.destroyObject(connection);
} else {
userPool.releaseConnection(connection);
}
} catch (Exception e) {
// ignore
}
}
}
//-----------------------------------------------------------< internal >---
    /**
     * Collects the declared (direct) groups of an identity by searching below the group
     * base DN with the configured member-of search filter.
     *
     * @param ref reference to the identity
     * @param dn the DN of the identity's LDAP entry, used to build the search filter
     * @return map of identities where the key is the DN of the LDAP entity
     * @throws ExternalIdentityException if the search fails
     */
    Map<String, ExternalIdentityRef> getDeclaredGroupRefs(ExternalIdentityRef ref, String dn) throws ExternalIdentityException {
        if (!isMyRef(ref)) {
            return Collections.emptyMap();
        }

        String searchFilter = config.getMemberOfSearchFilter(dn);

        LdapConnection connection = null;
        SearchCursor searchCursor = null;
        try {
            // Create the SearchRequest object
            SearchRequest req = new SearchRequestImpl();
            req.setScope(SearchScope.SUBTREE);
            String idAttribute = config.getGroupConfig().getIdAttribute();
            // only the id attribute is needed; the group is identified by its DN
            req.addAttributes(idAttribute == null? SchemaConstants.NO_ATTRIBUTE : idAttribute);
            req.setTimeLimit((int) config.getSearchTimeout());
            req.setBase(new Dn(config.getGroupConfig().getBaseDN()));
            req.setFilter(searchFilter);

            log.debug("getDeclaredGroupRefs: using SearchRequest {}.", req);

            Map<String, ExternalIdentityRef> groups = new HashMap<>();
            DebugTimer timer = new DebugTimer();
            connection = connect();
            timer.mark(MARKER_CONNECT);

            searchCursor = connection.search(req);
            timer.mark("search");
            while (searchCursor.next()) {
                Response response = searchCursor.get();
                if (response instanceof SearchResultEntry) {
                    Entry resultEntry = ((SearchResultEntry) response).getEntry();
                    // the ref id is the group entry's DN
                    ExternalIdentityRef groupRef = new ExternalIdentityRef(resultEntry.getDn().toString(), this.getName());
                    groups.put(groupRef.getId(), groupRef);
                }
            }
            timer.mark("iterate");
            log.debug("getDeclaredGroupRefs: search below {} with {} found {} entries. {}",
                    config.getGroupConfig().getBaseDN(), searchFilter, groups.size(), timer);
            return groups;
        } catch (Exception e) {
            throw error(e, "Error during ldap membership search.");
        } finally {
            closeSearchCursor(searchCursor);
            disconnect(connection);
        }
    }
/**
* Collects the declared (direct) members of a group
* @param ref the reference to the group
* @return map of identity refers
* @throws ExternalIdentityException if an error occurs
*/
Map<String, ExternalIdentityRef> getDeclaredMemberRefs(ExternalIdentityRef ref, String dn) throws ExternalIdentityException {
if (!isMyRef(ref)) {
return Collections.emptyMap();
}
LdapConnection connection = null;
try {
Map<String, ExternalIdentityRef> members = new HashMap<>();
DebugTimer timer = new DebugTimer();
connection = connect();
timer.mark(MARKER_CONNECT);
Entry entry = connection.lookup(dn);
timer.mark(MARKER_LOOKUP);
Attribute attr = entry.get(config.getGroupMemberAttribute());
if (attr == null) {
log.warn("LDAP group does not have configured attribute: {}", config.getGroupMemberAttribute());
} else {
for (Value value: attr) {
ExternalIdentityRef memberRef = new ExternalIdentityRef(value.getString(), this.getName());
members.put(memberRef.getId(), memberRef);
}
}
timer.mark("iterate");
log.debug("members lookup of {} found {} members. {}", ref.getId(), members.size(), timer);
return members;
} catch (Exception e) {
throw error(e, "Error during ldap group members lookup.");
} finally {
disconnect(connection);
}
}
//------------------------------------------------------------< private >---
    /**
     * Initializes the ldap identity provider: creates the admin (bound) and user (unbound)
     * connection factories and, if the respective pool's maxActive is non-zero, wraps them
     * in connection pools. Must be called exactly once.
     *
     * @throws IllegalStateException if the provider was already initialized
     */
    private void init() {
        if (adminConnectionFactory != null) {
            throw new IllegalStateException("Provider already initialized.");
        }

        // setup admin connection pool
        LdapConnectionConfig cc = createConnectionConfig();
        String bindDN = config.getBindDN();
        if (bindDN != null && !bindDN.isEmpty()) {
            cc.setName(bindDN);
            cc.setCredentials(config.getBindPassword());
        }
        adminConnectionFactory = new ValidatingPoolableLdapConnectionFactory(cc);
        if (config.getAdminPoolConfig().lookupOnValidate()) {
            // validate pooled connections with a lookup, not just a liveness check
            adminConnectionFactory.setValidator(new LookupLdapConnectionValidator());
        } else {
            adminConnectionFactory.setValidator(new DefaultLdapConnectionValidator());
        }
        if (config.getAdminPoolConfig().getMaxActive() != 0) {
            // maxActive == 0 disables pooling; connections are then created per use
            adminPool = new LdapConnectionPool(adminConnectionFactory);
            adminPool.setTestOnBorrow(true);
            adminPool.setMaxTotal(config.getAdminPoolConfig().getMaxActive());
            adminPool.setBlockWhenExhausted(true);
            adminPool.setMinEvictableIdleTimeMillis(config.getAdminPoolConfig().getMinEvictableIdleTimeMillis());
            adminPool.setTimeBetweenEvictionRunsMillis(config.getAdminPoolConfig().getTimeBetweenEvictionRunsMillis());
            adminPool.setNumTestsPerEvictionRun(config.getAdminPoolConfig().getNumTestsPerEvictionRun());
        }

        // setup unbound connection pool. let's create a new version of the config
        cc = createConnectionConfig();

        userConnectionFactory = new PoolableUnboundConnectionFactory(cc);
        if (config.getUserPoolConfig().lookupOnValidate()) {
            userConnectionFactory.setValidator(new UnboundLookupConnectionValidator());
        } else {
            userConnectionFactory.setValidator(new UnboundConnectionValidator());
        }
        if (config.getUserPoolConfig().getMaxActive() != 0) {
            userPool = new UnboundLdapConnectionPool(userConnectionFactory);
            userPool.setTestOnBorrow(true);
            userPool.setMaxTotal(config.getUserPoolConfig().getMaxActive());
            userPool.setBlockWhenExhausted(true);
            userPool.setMinEvictableIdleTimeMillis(config.getUserPoolConfig().getMinEvictableIdleTimeMillis());
            userPool.setTimeBetweenEvictionRunsMillis(config.getUserPoolConfig().getTimeBetweenEvictionRunsMillis());
            userPool.setNumTestsPerEvictionRun(config.getUserPoolConfig().getNumTestsPerEvictionRun());
        }

        log.info("LdapIdentityProvider initialized: {}", config);
    }
/**
* Creates a new connection config based on the config.
* @return the connection config.
*/
@NotNull
private LdapConnectionConfig createConnectionConfig() {
LdapConnectionConfig cc = new LdapConnectionConfig();
cc.setLdapHost(config.getHostname());
cc.setLdapPort(config.getPort());
cc.setUseSsl(config.useSSL());
cc.setUseTls(config.useTLS());
// todo: implement better trustmanager/keystore management (via sling/felix)
if (config.noCertCheck()) {
cc.setTrustManagers(new NoVerificationTrustManager());
}
String[] enabledProtocols = config.enabledProtocols();
if (enabledProtocols != null && enabledProtocols.length > 0) {
cc.setEnabledProtocols(enabledProtocols);
}
return cc;
}
    /**
     * Searches for a single entry below the identity config's base DN using its search
     * filter for the given id. If the search returns more than one entry, only the first
     * is used and the extras are discarded with a warning.
     *
     * @param connection the LDAP connection to use
     * @param idConfig the user or group identity configuration
     * @param id the id value to substitute into the search filter
     * @param customAttributes extra attributes to fetch; empty means all user attributes
     * @return the entry, or {@code null} if none was found
     */
    @Nullable
    private Entry getEntry(@NotNull LdapConnection connection, @NotNull LdapProviderConfig.Identity idConfig, @NotNull String id, @NotNull String[] customAttributes)
            throws CursorException, LdapException {
        String searchFilter = idConfig.getSearchFilter(id);

        // Create the SearchRequest object
        SearchRequest req = createSearchRequest(customAttributes, (int) config.getSearchTimeout(), idConfig.getBaseDN(), searchFilter);

        log.debug("getEntry: using SearchRequest {}.", req);

        // Process the request
        SearchCursor searchCursor = null;
        Entry resultEntry = null;
        try {
            searchCursor = connection.search(req);
            while (searchCursor.next()) {
                if (resultEntry != null) {
                    log.warn("search for {} returned more than one entry. discarding additional ones.", searchFilter);
                } else {
                    // process the SearchResultEntry
                    Response response = searchCursor.get();
                    if (response instanceof SearchResultEntry) {
                        resultEntry = ((SearchResultEntry) response).getEntry();
                    }
                }
            }
        } finally {
            closeSearchCursor(searchCursor);
        }
        if (resultEntry == null) {
            log.debug("getEntry: search below {} with {} found 0 entries.", idConfig.getBaseDN(), searchFilter);
        } else {
            log.debug("getEntry: search below {} with {} found {}", idConfig.getBaseDN(), searchFilter, resultEntry.getDn());
        }
        return resultEntry;
    }
@NotNull
private SearchResultIterator getEntryIterator(@NotNull LdapProviderConfig.Identity idConfig) throws LdapException, CursorException, ExternalIdentityException {
StringBuilder filter = new StringBuilder();
int num = 0;
for (String objectClass: idConfig.getObjectClasses()) {
num++;
filter.append("(objectclass=")
.append(LdapProviderConfig.encodeFilterValue(objectClass))
.append(')');
}
String extraFilter = idConfig.getExtraFilter();
if (extraFilter != null && !extraFilter.isEmpty()) {
num++;
filter.append(extraFilter);
}
String searchFilter = num > 1
? "(&" + filter + ')'
: filter.toString();
return new SearchResultIterator(searchFilter, idConfig);
}
@NotNull
private static SearchRequest createSearchRequest(@NotNull String[] attributes, long timeout, @NotNull String baseDN, @NotNull String searchFilter) throws LdapException {
SearchRequest req = new SearchRequestImpl();
req.setScope(SearchScope.SUBTREE);
if (attributes.length == 0) {
req.addAttributes(SchemaConstants.ALL_USER_ATTRIBUTES);
} else {
req.addAttributes(attributes);
}
req.setTimeLimit((int) timeout);
req.setBase(new Dn(baseDN));
req.setFilter(searchFilter);
return req;
}
    /**
     * Iterator over LDAP entries that fetches results in pages of 1000 using the paged
     * results control (OAK-2874). A fresh connection is used per page. The iterator is
     * primed in the constructor and advances one entry ahead of the caller.
     */
    private final class SearchResultIterator implements Iterator<Entry> {

        private final String searchFilter;
        private final LdapProviderConfig.Identity idConfig;

        // paging cookie returned by the server; null/empty means no more pages
        private byte[] cookie;
        // the current page of entries
        private List<Entry> page = Collections.emptyList();
        private boolean searchComplete;
        // index of the next entry to return within 'page'; -1 means exhausted
        private int pos = -1;

        public SearchResultIterator(
                @NotNull String searchFilter,
                @NotNull LdapProviderConfig.Identity idConfig) throws LdapException, CursorException, ExternalIdentityException {
            this.searchFilter = searchFilter;
            this.idConfig = idConfig;
            findNextEntry();
        }

        //-------------------------------------------------------< Iterator >---

        @Override
        public boolean hasNext() {
            return pos >= 0;
        }

        @Override
        public Entry next() {
            if (hasNext()) {
                try {
                    Entry entry = page.get(pos);
                    findNextEntry();
                    return entry;
                } catch (LdapException | CursorException | ExternalIdentityException e) {
                    // NOTE(review): if fetching the next page fails here, the already
                    // retrieved entry is dropped and NoSuchElementException is thrown.
                    log.error("Error while performing LDAP search", e);
                }
            }
            throw new NoSuchElementException();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

        //-------------------------------------------------------< internal >---

        /** Creates the paged search request for the next page identified by {@code cookie}. */
        @NotNull
        private SearchRequest createSearchRequest(byte[] cookie, @NotNull String[] userAttributes) throws LdapException {
            SearchRequest req = LdapIdentityProvider.createSearchRequest(userAttributes, config.getSearchTimeout(), idConfig.getBaseDN(), searchFilter);

            // do paged searches (OAK-2874)
            PagedResultsImpl pagedResults = new PagedResultsImpl();
            pagedResults.setSize(1000);
            pagedResults.setCookie(cookie);
            req.addControl(pagedResults);
            return req;
        }

        /**
         * Fetches the next page of entries into {@code page}. Returns {@code false} when
         * the search is complete or the page is empty. Updates {@code cookie} from the
         * paged-results control of the search-done response; servers answering
         * UNWILLING_TO_PERFORM terminate the paging.
         */
        private boolean loadNextPage() throws ExternalIdentityException, LdapException, CursorException {
            if (searchComplete) {
                return false;
            }
            SearchCursor searchCursor = null;
            DebugTimer timer = new DebugTimer();
            LdapConnection connection = connect();
            timer.mark(MARKER_CONNECT);
            page = new ArrayList<>();
            try {
                SearchRequest req = createSearchRequest(cookie, config.getCustomAttributes());
                log.debug("loadNextPage: using SearchRequest {}.", req);
                searchCursor = connection.search(req);
                while (searchCursor.next()) {
                    Response response = searchCursor.get();

                    if (response instanceof SearchResultEntry) {
                        Entry resultEntry = ((SearchResultEntry) response).getEntry();
                        page.add(resultEntry);
                        log.debug("loadNextPage: search below {} with {} found {}", idConfig.getBaseDN(), searchFilter, resultEntry.getDn());
                    }
                }

                boolean done = searchCursor.isDone();
                cookie = null;
                if (done) {
                    SearchResultDone searchResultDone = searchCursor.getSearchResultDone();
                    if (searchResultDone != null && searchResultDone.getLdapResult().getResultCode() != ResultCodeEnum.UNWILLING_TO_PERFORM) {
                        PagedResults ctrl = (PagedResults) searchResultDone.getControl(PagedResults.OID);
                        if (ctrl != null) {
                            cookie = ctrl.getCookie();
                        }
                    }
                }
                searchComplete = cookie == null || cookie.length == 0;
                timer.mark(MARKER_LOOKUP);

                return !page.isEmpty();
            } finally {
                closeSearchCursor(searchCursor);
                disconnect(connection);
            }
        }

        /** Advances {@code pos} to the next entry, loading a new page when needed. */
        private void findNextEntry() throws LdapException, CursorException, ExternalIdentityException {
            if (pos == -1 && !loadNextPage()) {
                return;
            }
            if (pos + 1 == page.size()) {
                pos = -1;
                page = Collections.emptyList();
                if (!loadNextPage()) {
                    return;
                }
            }
            pos++;
        }
    }
/**
 * Creates an {@link ExternalUser} from the given LDAP entry.
 *
 * @param entry the LDAP entry backing the user
 * @param id the user id, or {@code null} to resolve it from the entry
 * @throws LdapInvalidAttributeValueException if the id attribute is missing
 */
@NotNull
private ExternalUser createUser(@NotNull Entry entry, @Nullable String id)
        throws LdapInvalidAttributeValueException {
    ExternalIdentity identity = createIdentity(entry, id, false);
    return (ExternalUser) identity;
}
/**
 * Creates an {@link ExternalGroup} from the given LDAP entry.
 *
 * @param entry the LDAP entry backing the group
 * @param id the group id, or {@code null} to resolve it from the entry
 * @throws LdapInvalidAttributeValueException if the id attribute is missing
 */
@NotNull
private ExternalGroup createGroup(@NotNull Entry entry, @Nullable String id)
        throws LdapInvalidAttributeValueException {
    ExternalIdentity identity = createIdentity(entry, id, true);
    return (ExternalGroup) identity;
}
/**
 * Builds an external identity (user or group) from an LDAP entry.
 * <p>
 * The id is resolved from the configured id attribute when not supplied;
 * the external id is either that id or the entry DN, depending on
 * configuration. All human-readable entry attributes are copied into the
 * identity's property map.
 *
 * @param entry the LDAP entry
 * @param id the identity id, or {@code null} to read it from the entry
 * @param isGroup {@code true} to build a group, {@code false} for a user
 * @throws LdapInvalidAttributeValueException if the id attribute is absent
 */
@NotNull
private ExternalIdentity createIdentity(@NotNull Entry entry, @Nullable String id, boolean isGroup)
        throws LdapInvalidAttributeValueException {
    LdapProviderConfig.Identity identityConfig = isGroup ? config.getGroupConfig() : config.getUserConfig();
    String resolvedId = id;
    if (resolvedId == null) {
        String idAttribute = identityConfig.getIdAttribute();
        Attribute idAttr = entry.get(idAttribute);
        if (idAttr == null) {
            throw new LdapInvalidAttributeValueException(ResultCodeEnum.CONSTRAINT_VIOLATION,
                    "no value found for attribute '" + idAttribute + "' for entry " + entry);
        }
        resolvedId = idAttr.getString();
    }
    // External id: either the plain id or the full DN, per configuration.
    String externalId = config.getUseUidForExtId() ? resolvedId : entry.getDn().getName();
    ExternalIdentityRef ref = new ExternalIdentityRef(externalId, this.getName());
    String intermediatePath = identityConfig.makeDnPath() ? createDNPath(entry.getDn()) : null;
    LdapIdentity identity;
    if (isGroup) {
        identity = new LdapGroup(this, ref, resolvedId, intermediatePath, entry);
    } else {
        identity = new LdapUser(this, ref, resolvedId, intermediatePath, entry);
    }
    applyAttributes(identity.getProperties(), entry);
    return identity;
}
/**
 * Copies all human-readable attributes of the entry into the property map.
 * Multi-valued attributes are stored as a {@code List<String>}, single
 * values as a plain {@code String}. Binary attributes are ignored.
 *
 * @param props the target property map (mutated in place)
 * @param entry the LDAP entry to read from
 * @throws LdapInvalidAttributeValueException if a value cannot be read as a string
 */
private static void applyAttributes(Map<String, Object> props, Entry entry) throws LdapInvalidAttributeValueException {
    for (Attribute attribute : entry.getAttributes()) {
        if (!attribute.isHumanReadable()) {
            continue;
        }
        if (attribute.size() > 1) {
            // Multi-valued property: collect every value into a list.
            List<String> values = new ArrayList<>(attribute.size());
            for (Value value : attribute) {
                values.add(value.getString());
            }
            props.put(attribute.getId(), values);
        } else {
            props.put(attribute.getId(), attribute.getString());
        }
    }
}
/**
 * Obtains an admin LDAP connection, either from the connection pool or
 * directly from the connection factory when pooling is disabled.
 *
 * @return a connected {@link LdapConnection}; the caller must release it
 *         via {@code disconnect}
 * @throws ExternalIdentityException if the connection cannot be established
 */
@NotNull
private LdapConnection connect() throws ExternalIdentityException {
    try {
        return (adminPool != null)
                ? adminPool.getConnection()
                : adminConnectionFactory.makeObject().getObject();
    } catch (Exception e) {
        throw error(e, "Error while connecting to the ldap server.");
    }
}
/**
 * Returns a connection obtained via {@code connect}: back to the pool when
 * pooling is enabled, otherwise destroyed through the factory. Failures are
 * logged, never propagated.
 *
 * @param connection the connection to release; {@code null} is a no-op
 */
private void disconnect(@Nullable LdapConnection connection) {
    if (connection == null) {
        return;
    }
    try {
        if (adminPool != null) {
            adminPool.releaseConnection(connection);
        } else {
            adminConnectionFactory.destroyObject(new DefaultPooledObject<>(connection));
        }
    } catch (Exception e) {
        log.warn("Error while disconnecting from the ldap server.", e);
    }
}
/**
 * Checks whether an external identity reference belongs to this provider.
 * References without a provider name are treated as local.
 */
private boolean isMyRef(@NotNull ExternalIdentityRef ref) {
    String providerName = ref.getProviderName();
    if (providerName == null || providerName.isEmpty()) {
        return true;
    }
    return getName().equals(providerName);
}
/**
 * Builds an intermediate repository path from a DN by joining its RDNs with
 * {@code '/'}, escaping characters that are illegal in JCR names.
 *
 * @param dn the dn of the identity
 * @return the intermediate path (empty string for an empty DN)
 */
private static String createDNPath(Dn dn) {
    StringBuilder path = new StringBuilder();
    String separator = "";
    for (Rdn rdn : dn.getRdns()) {
        path.append(separator).append(Text.escapeIllegalJcrChars(rdn.toString()));
        separator = "/";
    }
    return path.toString();
}
/**
 * Logs an LDAP lookup failure (including timing info when available) and
 * wraps it in an {@link ExternalIdentityException}.
 * <p>
 * Fix: the SLF4J placeholder string was previously reused as the exception
 * message, so thrown exceptions literally carried "{}". The exception
 * message is now plain text; the timer detail goes only to the log.
 *
 * @param e the underlying failure, preserved as the cause
 * @param timer optional timer whose summary is appended to the log line
 * @return the exception to throw
 */
private static ExternalIdentityException lookupFailedException(@NotNull Exception e, @Nullable DebugTimer timer) {
    String msg = "Error during ldap lookup.";
    // Trailing Throwable argument is rendered as a stack trace by SLF4J.
    log.error("{} {}", msg, (timer != null) ? timer.getString() : "", e);
    return new ExternalIdentityException(msg, e);
}
/**
 * Logs the given failure and wraps it in an {@link ExternalIdentityException}
 * carrying the same message and cause.
 *
 * @param e the underlying failure, preserved as the cause
 * @param msg the message to log and attach to the exception
 * @return the exception to throw
 */
private static ExternalIdentityException error(@NotNull Exception e, @NotNull String msg) {
    ExternalIdentityException wrapped = new ExternalIdentityException(msg, e);
    log.error(msg, e);
    return wrapped;
}
/**
 * Quietly closes a search cursor; close failures are logged, not propagated.
 *
 * @param searchCursor the cursor to close; {@code null} is a no-op
 */
private static void closeSearchCursor(@Nullable SearchCursor searchCursor) {
    if (searchCursor == null) {
        return;
    }
    try {
        searchCursor.close();
    } catch (IOException e) {
        log.warn("Failed to close search cursor.", e);
    }
}
}
|
googleads/google-ads-java | 36,227 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/errors/CampaignCriterionErrorEnum.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/errors/campaign_criterion_error.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.errors;
/**
* <pre>
* Container for enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.errors.CampaignCriterionErrorEnum}
*/
public final class CampaignCriterionErrorEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.errors.CampaignCriterionErrorEnum)
CampaignCriterionErrorEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignCriterionErrorEnum.newBuilder() to construct.
private CampaignCriterionErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CampaignCriterionErrorEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CampaignCriterionErrorEnum();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_CampaignCriterionErrorEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_CampaignCriterionErrorEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.class, com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.Builder.class);
}
/**
* <pre>
* Enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.CampaignCriterionError}
*/
public enum CampaignCriterionError
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Enum unspecified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* The received error code is not known in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Concrete type of criterion (keyword v.s. placement) is required for
* CREATE and UPDATE operations.
* </pre>
*
* <code>CONCRETE_TYPE_REQUIRED = 2;</code>
*/
CONCRETE_TYPE_REQUIRED(2),
/**
* <pre>
* Invalid placement URL.
* </pre>
*
* <code>INVALID_PLACEMENT_URL = 3;</code>
*/
INVALID_PLACEMENT_URL(3),
/**
* <pre>
* Criteria type can not be excluded for the campaign by the customer. like
* AOL account type cannot target site type criteria
* </pre>
*
* <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 4;</code>
*/
CANNOT_EXCLUDE_CRITERIA_TYPE(4),
/**
* <pre>
* Cannot set the campaign criterion status for this criteria type.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_CRITERIA_TYPE = 5;</code>
*/
CANNOT_SET_STATUS_FOR_CRITERIA_TYPE(5),
/**
* <pre>
* Cannot set the campaign criterion status for an excluded criteria.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA = 6;</code>
*/
CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA(6),
/**
* <pre>
* Cannot target and exclude the same criterion.
* </pre>
*
* <code>CANNOT_TARGET_AND_EXCLUDE = 7;</code>
*/
CANNOT_TARGET_AND_EXCLUDE(7),
/**
* <pre>
* The mutate contained too many operations.
* </pre>
*
* <code>TOO_MANY_OPERATIONS = 8;</code>
*/
TOO_MANY_OPERATIONS(8),
/**
* <pre>
* This operator cannot be applied to a criterion of this type.
* </pre>
*
* <code>OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE = 9;</code>
*/
OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE(9),
/**
* <pre>
* The Shopping campaign sales country is not supported for
* ProductSalesChannel targeting.
* </pre>
*
* <code>SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL = 10;</code>
*/
SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL(10),
/**
* <pre>
* The existing field can't be updated with CREATE operation. It can be
* updated with UPDATE operation only.
* </pre>
*
* <code>CANNOT_ADD_EXISTING_FIELD = 11;</code>
*/
CANNOT_ADD_EXISTING_FIELD(11),
/**
* <pre>
* Negative criteria are immutable, so updates are not allowed.
* </pre>
*
* <code>CANNOT_UPDATE_NEGATIVE_CRITERION = 12;</code>
*/
CANNOT_UPDATE_NEGATIVE_CRITERION(12),
/**
* <pre>
* Only free form names are allowed for negative Smart campaign keyword
* theme.
* </pre>
*
* <code>CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION = 13;</code>
*/
CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION(13),
/**
* <pre>
* Invalid Smart campaign keyword theme constant criterion.
* </pre>
*
* <code>INVALID_KEYWORD_THEME_CONSTANT = 14;</code>
*/
INVALID_KEYWORD_THEME_CONSTANT(14),
/**
* <pre>
* A Smart campaign keyword theme constant or free-form Smart campaign
* keyword theme is required.
* </pre>
*
* <code>MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME = 15;</code>
*/
MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME(15),
/**
* <pre>
* A Smart campaign may not target proximity and location criteria
* simultaneously.
* </pre>
*
* <code>CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN = 16;</code>
*/
CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN(16),
/**
* <pre>
* A Smart campaign may not target multiple proximity criteria.
* </pre>
*
* <code>CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN = 17;</code>
*/
CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN(17),
/**
* <pre>
* Location is not launched for Local Services Campaigns.
* </pre>
*
* <code>LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN = 18;</code>
*/
LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN(18),
/**
* <pre>
* A Local Services campaign may not target certain criteria types.
* </pre>
*
* <code>LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN = 19;</code>
*/
LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN(19),
/**
* <pre>
* Country locations are not supported for Local Services campaign.
* </pre>
*
* <code>CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 20;</code>
*/
CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN(20),
/**
* <pre>
* Location is not within the home country of Local Services campaign.
* </pre>
*
* <code>LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 21;</code>
*/
LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN(21),
/**
* <pre>
* Local Services profile does not exist for a particular Local Services
* campaign.
* </pre>
*
* <code>CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN = 22;</code>
*/
CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN(22),
/**
* <pre>
* Local Services campaign must have at least one target location.
* </pre>
*
* <code>AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 23;</code>
*/
AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN(23),
/**
* <pre>
* At least one positive local service ID criterion is required for a Local
* Services campaign.
* </pre>
*
* <code>AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 24;</code>
*/
AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN(24),
/**
* <pre>
* Local service ID is not found under selected categories in local
* services campaign setting.
* </pre>
*
* <code>LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY = 25;</code>
*/
LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY(25),
/**
* <pre>
* For search advertising channel, brand lists can only be applied to
* exclusive targeting, broad match campaigns for inclusive targeting or
* PMax generated campaigns.
* </pre>
*
* <code>CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN = 26;</code>
*/
CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN(26),
/**
* <pre>
* Campaigns that target all countries and territories are limited to a
* certain number of top-level location exclusions. If removing a criterion
* causes the campaign to target all countries and territories and the
* campaign has more top-level location exclusions than the limit allows,
* then this error is returned.
* </pre>
*
* <code>CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS = 27;</code>
*/
CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS(27),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Enum unspecified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* The received error code is not known in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Concrete type of criterion (keyword v.s. placement) is required for
* CREATE and UPDATE operations.
* </pre>
*
* <code>CONCRETE_TYPE_REQUIRED = 2;</code>
*/
public static final int CONCRETE_TYPE_REQUIRED_VALUE = 2;
/**
* <pre>
* Invalid placement URL.
* </pre>
*
* <code>INVALID_PLACEMENT_URL = 3;</code>
*/
public static final int INVALID_PLACEMENT_URL_VALUE = 3;
/**
* <pre>
* Criteria type can not be excluded for the campaign by the customer. like
* AOL account type cannot target site type criteria
* </pre>
*
* <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 4;</code>
*/
public static final int CANNOT_EXCLUDE_CRITERIA_TYPE_VALUE = 4;
/**
* <pre>
* Cannot set the campaign criterion status for this criteria type.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_CRITERIA_TYPE = 5;</code>
*/
public static final int CANNOT_SET_STATUS_FOR_CRITERIA_TYPE_VALUE = 5;
/**
* <pre>
* Cannot set the campaign criterion status for an excluded criteria.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA = 6;</code>
*/
public static final int CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA_VALUE = 6;
/**
* <pre>
* Cannot target and exclude the same criterion.
* </pre>
*
* <code>CANNOT_TARGET_AND_EXCLUDE = 7;</code>
*/
public static final int CANNOT_TARGET_AND_EXCLUDE_VALUE = 7;
/**
* <pre>
* The mutate contained too many operations.
* </pre>
*
* <code>TOO_MANY_OPERATIONS = 8;</code>
*/
public static final int TOO_MANY_OPERATIONS_VALUE = 8;
/**
* <pre>
* This operator cannot be applied to a criterion of this type.
* </pre>
*
* <code>OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE = 9;</code>
*/
public static final int OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 9;
/**
* <pre>
* The Shopping campaign sales country is not supported for
* ProductSalesChannel targeting.
* </pre>
*
* <code>SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL = 10;</code>
*/
public static final int SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL_VALUE = 10;
/**
* <pre>
* The existing field can't be updated with CREATE operation. It can be
* updated with UPDATE operation only.
* </pre>
*
* <code>CANNOT_ADD_EXISTING_FIELD = 11;</code>
*/
public static final int CANNOT_ADD_EXISTING_FIELD_VALUE = 11;
/**
* <pre>
* Negative criteria are immutable, so updates are not allowed.
* </pre>
*
* <code>CANNOT_UPDATE_NEGATIVE_CRITERION = 12;</code>
*/
public static final int CANNOT_UPDATE_NEGATIVE_CRITERION_VALUE = 12;
/**
* <pre>
* Only free form names are allowed for negative Smart campaign keyword
* theme.
* </pre>
*
* <code>CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION = 13;</code>
*/
public static final int CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION_VALUE = 13;
/**
* <pre>
* Invalid Smart campaign keyword theme constant criterion.
* </pre>
*
* <code>INVALID_KEYWORD_THEME_CONSTANT = 14;</code>
*/
public static final int INVALID_KEYWORD_THEME_CONSTANT_VALUE = 14;
/**
* <pre>
* A Smart campaign keyword theme constant or free-form Smart campaign
* keyword theme is required.
* </pre>
*
* <code>MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME = 15;</code>
*/
public static final int MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME_VALUE = 15;
/**
* <pre>
* A Smart campaign may not target proximity and location criteria
* simultaneously.
* </pre>
*
* <code>CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN = 16;</code>
*/
public static final int CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN_VALUE = 16;
/**
* <pre>
* A Smart campaign may not target multiple proximity criteria.
* </pre>
*
* <code>CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN = 17;</code>
*/
public static final int CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN_VALUE = 17;
/**
* <pre>
* Location is not launched for Local Services Campaigns.
* </pre>
*
* <code>LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN = 18;</code>
*/
public static final int LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 18;
/**
* <pre>
* A Local Services campaign may not target certain criteria types.
* </pre>
*
* <code>LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN = 19;</code>
*/
public static final int LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 19;
/**
* <pre>
* Country locations are not supported for Local Services campaign.
* </pre>
*
* <code>CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 20;</code>
*/
public static final int CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 20;
/**
* <pre>
* Location is not within the home country of Local Services campaign.
* </pre>
*
* <code>LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 21;</code>
*/
public static final int LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 21;
/**
* <pre>
* Local Services profile does not exist for a particular Local Services
* campaign.
* </pre>
*
* <code>CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN = 22;</code>
*/
public static final int CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 22;
/**
* <pre>
* Local Services campaign must have at least one target location.
* </pre>
*
* <code>AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 23;</code>
*/
public static final int AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 23;
/**
* <pre>
* At least one positive local service ID criterion is required for a Local
* Services campaign.
* </pre>
*
* <code>AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 24;</code>
*/
public static final int AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 24;
/**
* <pre>
* Local service ID is not found under selected categories in local
* services campaign setting.
* </pre>
*
* <code>LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY = 25;</code>
*/
public static final int LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY_VALUE = 25;
/**
* <pre>
* For search advertising channel, brand lists can only be applied to
* exclusive targeting, broad match campaigns for inclusive targeting or
* PMax generated campaigns.
* </pre>
*
* <code>CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN = 26;</code>
*/
public static final int CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN_VALUE = 26;
/**
* <pre>
* Campaigns that target all countries and territories are limited to a
* certain number of top-level location exclusions. If removing a criterion
* causes the campaign to target all countries and territories and the
* campaign has more top-level location exclusions than the limit allows,
* then this error is returned.
* </pre>
*
* <code>CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS = 27;</code>
*/
public static final int CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS_VALUE = 27;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static CampaignCriterionError valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static CampaignCriterionError forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return CONCRETE_TYPE_REQUIRED;
case 3: return INVALID_PLACEMENT_URL;
case 4: return CANNOT_EXCLUDE_CRITERIA_TYPE;
case 5: return CANNOT_SET_STATUS_FOR_CRITERIA_TYPE;
case 6: return CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA;
case 7: return CANNOT_TARGET_AND_EXCLUDE;
case 8: return TOO_MANY_OPERATIONS;
case 9: return OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE;
case 10: return SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL;
case 11: return CANNOT_ADD_EXISTING_FIELD;
case 12: return CANNOT_UPDATE_NEGATIVE_CRITERION;
case 13: return CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION;
case 14: return INVALID_KEYWORD_THEME_CONSTANT;
case 15: return MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME;
case 16: return CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN;
case 17: return CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN;
case 18: return LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN;
case 19: return LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN;
case 20: return CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN;
case 21: return LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN;
case 22: return CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN;
case 23: return AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN;
case 24: return AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN;
case 25: return LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY;
case 26: return CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN;
case 27: return CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<CampaignCriterionError>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
CampaignCriterionError> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<CampaignCriterionError>() {
public CampaignCriterionError findValueByNumber(int number) {
return CampaignCriterionError.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.getDescriptor().getEnumTypes().get(0);
}
private static final CampaignCriterionError[] VALUES = values();
public static CampaignCriterionError valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private CampaignCriterionError(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.CampaignCriterionError)
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum other = (com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   * <pre>
   * Container for enum describing possible campaign criterion errors.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.errors.CampaignCriterionErrorEnum}
   *
   * Builder for the zero-field wrapper message above. Since the message has no
   * fields, this builder only manages unknown fields; all field mutators simply
   * delegate to the reflection-based superclass implementations.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.errors.CampaignCriterionErrorEnum)
      com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnumOrBuilder {
    // Message descriptor, resolved from the generated proto descriptor holder class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_CampaignCriterionErrorEnum_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_CampaignCriterionErrorEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.class, com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_CampaignCriterionErrorEnum_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum build() {
      com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum result = buildPartial();
      // isInitialized() is always true for this message, so this never throws in practice.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum buildPartial() {
      com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum result = new com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum(this);
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    // The following reflective field mutators delegate unchanged to the superclass;
    // the message declares no fields of its own.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum) {
        return mergeFrom((com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum other) {
      if (other == com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum.getDefaultInstance()) return this;
      // Only unknown fields can carry data for this message.
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // No known fields: everything except end-of-stream/end-group is
              // preserved as an unknown field.
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.errors.CampaignCriterionErrorEnum)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.errors.CampaignCriterionErrorEnum)
  // Shared immutable singleton used for all empty instances of this message.
  private static final com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum();
  }
  /** Returns the shared default (empty) instance of this message type. */
  public static com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On failure it attaches the partially parsed message to
  // the thrown InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<CampaignCriterionErrorEnum>
      PARSER = new com.google.protobuf.AbstractParser<CampaignCriterionErrorEnum>() {
    @java.lang.Override
    public CampaignCriterionErrorEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so the parser contract (IPBE only) holds.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  /** Static accessor for the singleton wire-format parser. */
  public static com.google.protobuf.Parser<CampaignCriterionErrorEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CampaignCriterionErrorEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.errors.CampaignCriterionErrorEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/errors/campaign_criterion_error.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.errors;
/**
* <pre>
* Container for enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.errors.CampaignCriterionErrorEnum}
*/
public final class CampaignCriterionErrorEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.errors.CampaignCriterionErrorEnum)
CampaignCriterionErrorEnumOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CampaignCriterionErrorEnum.newBuilder() to construct.
  private CampaignCriterionErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No fields to initialize; the message only wraps the nested error enum.
  private CampaignCriterionErrorEnum() {
  }

  // Used by the protobuf runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CampaignCriterionErrorEnum();
  }
  /** Message descriptor, resolved from the generated proto descriptor holder class. */
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_CampaignCriterionErrorEnum_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v20.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_CampaignCriterionErrorEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.class, com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.Builder.class);
  }
/**
* <pre>
* Enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.CampaignCriterionError}
*/
public enum CampaignCriterionError
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Enum unspecified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* The received error code is not known in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Concrete type of criterion (keyword v.s. placement) is required for
* CREATE and UPDATE operations.
* </pre>
*
* <code>CONCRETE_TYPE_REQUIRED = 2;</code>
*/
CONCRETE_TYPE_REQUIRED(2),
/**
* <pre>
* Invalid placement URL.
* </pre>
*
* <code>INVALID_PLACEMENT_URL = 3;</code>
*/
INVALID_PLACEMENT_URL(3),
/**
* <pre>
* Criteria type can not be excluded for the campaign by the customer. like
* AOL account type cannot target site type criteria
* </pre>
*
* <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 4;</code>
*/
CANNOT_EXCLUDE_CRITERIA_TYPE(4),
/**
* <pre>
* Cannot set the campaign criterion status for this criteria type.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_CRITERIA_TYPE = 5;</code>
*/
CANNOT_SET_STATUS_FOR_CRITERIA_TYPE(5),
/**
* <pre>
* Cannot set the campaign criterion status for an excluded criteria.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA = 6;</code>
*/
CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA(6),
/**
* <pre>
* Cannot target and exclude the same criterion.
* </pre>
*
* <code>CANNOT_TARGET_AND_EXCLUDE = 7;</code>
*/
CANNOT_TARGET_AND_EXCLUDE(7),
/**
* <pre>
* The mutate contained too many operations.
* </pre>
*
* <code>TOO_MANY_OPERATIONS = 8;</code>
*/
TOO_MANY_OPERATIONS(8),
/**
* <pre>
* This operator cannot be applied to a criterion of this type.
* </pre>
*
* <code>OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE = 9;</code>
*/
OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE(9),
/**
* <pre>
* The Shopping campaign sales country is not supported for
* ProductSalesChannel targeting.
* </pre>
*
* <code>SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL = 10;</code>
*/
SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL(10),
/**
* <pre>
* The existing field can't be updated with CREATE operation. It can be
* updated with UPDATE operation only.
* </pre>
*
* <code>CANNOT_ADD_EXISTING_FIELD = 11;</code>
*/
CANNOT_ADD_EXISTING_FIELD(11),
/**
* <pre>
* Negative criteria are immutable, so updates are not allowed.
* </pre>
*
* <code>CANNOT_UPDATE_NEGATIVE_CRITERION = 12;</code>
*/
CANNOT_UPDATE_NEGATIVE_CRITERION(12),
/**
* <pre>
* Only free form names are allowed for negative Smart campaign keyword
* theme.
* </pre>
*
* <code>CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION = 13;</code>
*/
CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION(13),
/**
* <pre>
* Invalid Smart campaign keyword theme constant criterion.
* </pre>
*
* <code>INVALID_KEYWORD_THEME_CONSTANT = 14;</code>
*/
INVALID_KEYWORD_THEME_CONSTANT(14),
/**
* <pre>
* A Smart campaign keyword theme constant or free-form Smart campaign
* keyword theme is required.
* </pre>
*
* <code>MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME = 15;</code>
*/
MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME(15),
/**
* <pre>
* A Smart campaign may not target proximity and location criteria
* simultaneously.
* </pre>
*
* <code>CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN = 16;</code>
*/
CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN(16),
/**
* <pre>
* A Smart campaign may not target multiple proximity criteria.
* </pre>
*
* <code>CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN = 17;</code>
*/
CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN(17),
/**
* <pre>
* Location is not launched for Local Services Campaigns.
* </pre>
*
* <code>LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN = 18;</code>
*/
LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN(18),
/**
* <pre>
* A Local Services campaign may not target certain criteria types.
* </pre>
*
* <code>LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN = 19;</code>
*/
LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN(19),
/**
* <pre>
* Country locations are not supported for Local Services campaign.
* </pre>
*
* <code>CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 20;</code>
*/
CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN(20),
/**
* <pre>
* Location is not within the home country of Local Services campaign.
* </pre>
*
* <code>LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 21;</code>
*/
LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN(21),
/**
* <pre>
* Local Services profile does not exist for a particular Local Services
* campaign.
* </pre>
*
* <code>CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN = 22;</code>
*/
CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN(22),
/**
* <pre>
* Local Services campaign must have at least one target location.
* </pre>
*
* <code>AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 23;</code>
*/
AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN(23),
/**
* <pre>
* At least one positive local service ID criterion is required for a Local
* Services campaign.
* </pre>
*
* <code>AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 24;</code>
*/
AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN(24),
/**
* <pre>
* Local service ID is not found under selected categories in local
* services campaign setting.
* </pre>
*
* <code>LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY = 25;</code>
*/
LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY(25),
/**
* <pre>
* For search advertising channel, brand lists can only be applied to
* exclusive targeting, broad match campaigns for inclusive targeting or
* PMax generated campaigns.
* </pre>
*
* <code>CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN = 26;</code>
*/
CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN(26),
/**
* <pre>
* Campaigns that target all countries and territories are limited to a
* certain number of top-level location exclusions. If removing a criterion
* causes the campaign to target all countries and territories and the
* campaign has more top-level location exclusions than the limit allows,
* then this error is returned.
* </pre>
*
* <code>CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS = 27;</code>
*/
CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS(27),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Enum unspecified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* The received error code is not known in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Concrete type of criterion (keyword v.s. placement) is required for
* CREATE and UPDATE operations.
* </pre>
*
* <code>CONCRETE_TYPE_REQUIRED = 2;</code>
*/
public static final int CONCRETE_TYPE_REQUIRED_VALUE = 2;
/**
* <pre>
* Invalid placement URL.
* </pre>
*
* <code>INVALID_PLACEMENT_URL = 3;</code>
*/
public static final int INVALID_PLACEMENT_URL_VALUE = 3;
/**
* <pre>
* Criteria type can not be excluded for the campaign by the customer. like
* AOL account type cannot target site type criteria
* </pre>
*
* <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 4;</code>
*/
public static final int CANNOT_EXCLUDE_CRITERIA_TYPE_VALUE = 4;
/**
* <pre>
* Cannot set the campaign criterion status for this criteria type.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_CRITERIA_TYPE = 5;</code>
*/
public static final int CANNOT_SET_STATUS_FOR_CRITERIA_TYPE_VALUE = 5;
/**
* <pre>
* Cannot set the campaign criterion status for an excluded criteria.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA = 6;</code>
*/
public static final int CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA_VALUE = 6;
/**
* <pre>
* Cannot target and exclude the same criterion.
* </pre>
*
* <code>CANNOT_TARGET_AND_EXCLUDE = 7;</code>
*/
public static final int CANNOT_TARGET_AND_EXCLUDE_VALUE = 7;
/**
* <pre>
* The mutate contained too many operations.
* </pre>
*
* <code>TOO_MANY_OPERATIONS = 8;</code>
*/
public static final int TOO_MANY_OPERATIONS_VALUE = 8;
/**
* <pre>
* This operator cannot be applied to a criterion of this type.
* </pre>
*
* <code>OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE = 9;</code>
*/
public static final int OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 9;
/**
* <pre>
* The Shopping campaign sales country is not supported for
* ProductSalesChannel targeting.
* </pre>
*
* <code>SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL = 10;</code>
*/
public static final int SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL_VALUE = 10;
/**
* <pre>
* The existing field can't be updated with CREATE operation. It can be
* updated with UPDATE operation only.
* </pre>
*
* <code>CANNOT_ADD_EXISTING_FIELD = 11;</code>
*/
public static final int CANNOT_ADD_EXISTING_FIELD_VALUE = 11;
/**
* <pre>
* Negative criteria are immutable, so updates are not allowed.
* </pre>
*
* <code>CANNOT_UPDATE_NEGATIVE_CRITERION = 12;</code>
*/
public static final int CANNOT_UPDATE_NEGATIVE_CRITERION_VALUE = 12;
/**
* <pre>
* Only free form names are allowed for negative Smart campaign keyword
* theme.
* </pre>
*
* <code>CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION = 13;</code>
*/
public static final int CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION_VALUE = 13;
/**
* <pre>
* Invalid Smart campaign keyword theme constant criterion.
* </pre>
*
* <code>INVALID_KEYWORD_THEME_CONSTANT = 14;</code>
*/
public static final int INVALID_KEYWORD_THEME_CONSTANT_VALUE = 14;
/**
* <pre>
* A Smart campaign keyword theme constant or free-form Smart campaign
* keyword theme is required.
* </pre>
*
* <code>MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME = 15;</code>
*/
public static final int MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME_VALUE = 15;
/**
* <pre>
* A Smart campaign may not target proximity and location criteria
* simultaneously.
* </pre>
*
* <code>CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN = 16;</code>
*/
public static final int CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN_VALUE = 16;
/**
* <pre>
* A Smart campaign may not target multiple proximity criteria.
* </pre>
*
* <code>CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN = 17;</code>
*/
public static final int CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN_VALUE = 17;
/**
* <pre>
* Location is not launched for Local Services Campaigns.
* </pre>
*
* <code>LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN = 18;</code>
*/
public static final int LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 18;
/**
* <pre>
* A Local Services campaign may not target certain criteria types.
* </pre>
*
* <code>LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN = 19;</code>
*/
public static final int LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 19;
/**
* <pre>
* Country locations are not supported for Local Services campaign.
* </pre>
*
* <code>CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 20;</code>
*/
public static final int CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 20;
/**
* <pre>
* Location is not within the home country of Local Services campaign.
* </pre>
*
* <code>LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 21;</code>
*/
public static final int LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 21;
/**
* <pre>
* Local Services profile does not exist for a particular Local Services
* campaign.
* </pre>
*
* <code>CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN = 22;</code>
*/
public static final int CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 22;
/**
* <pre>
* Local Services campaign must have at least one target location.
* </pre>
*
* <code>AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 23;</code>
*/
public static final int AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 23;
/**
* <pre>
* At least one positive local service ID criterion is required for a Local
* Services campaign.
* </pre>
*
* <code>AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 24;</code>
*/
public static final int AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 24;
/**
* <pre>
* Local service ID is not found under selected categories in local
* services campaign setting.
* </pre>
*
* <code>LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY = 25;</code>
*/
public static final int LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY_VALUE = 25;
/**
* <pre>
* For search advertising channel, brand lists can only be applied to
* exclusive targeting, broad match campaigns for inclusive targeting or
* PMax generated campaigns.
* </pre>
*
* <code>CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN = 26;</code>
*/
public static final int CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN_VALUE = 26;
/**
* <pre>
* Campaigns that target all countries and territories are limited to a
* certain number of top-level location exclusions. If removing a criterion
* causes the campaign to target all countries and territories and the
* campaign has more top-level location exclusions than the limit allows,
* then this error is returned.
* </pre>
*
* <code>CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS = 27;</code>
*/
public static final int CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS_VALUE = 27;
    /**
     * Returns the numeric wire value of this enum constant.
     *
     * @throws java.lang.IllegalArgumentException if called on {@code UNRECOGNIZED},
     *     which has no defined wire value.
     */
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static CampaignCriterionError valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or
     *     {@code null} if the value is not defined in this version (note: not
     *     {@code UNRECOGNIZED}).
     */
    public static CampaignCriterionError forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return CONCRETE_TYPE_REQUIRED;
        case 3: return INVALID_PLACEMENT_URL;
        case 4: return CANNOT_EXCLUDE_CRITERIA_TYPE;
        case 5: return CANNOT_SET_STATUS_FOR_CRITERIA_TYPE;
        case 6: return CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA;
        case 7: return CANNOT_TARGET_AND_EXCLUDE;
        case 8: return TOO_MANY_OPERATIONS;
        case 9: return OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE;
        case 10: return SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL;
        case 11: return CANNOT_ADD_EXISTING_FIELD;
        case 12: return CANNOT_UPDATE_NEGATIVE_CRITERION;
        case 13: return CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION;
        case 14: return INVALID_KEYWORD_THEME_CONSTANT;
        case 15: return MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME;
        case 16: return CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN;
        case 17: return CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN;
        case 18: return LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN;
        case 19: return LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN;
        case 20: return CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN;
        case 21: return LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN;
        case 22: return CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN;
        case 23: return AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN;
        case 24: return AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN;
        case 25: return LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY;
        case 26: return CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN;
        case 27: return CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS;
        default: return null;
      }
    }
    /** Returns the number-to-enum lookup used by the protobuf runtime. */
    public static com.google.protobuf.Internal.EnumLiteMap<CampaignCriterionError>
        internalGetValueMap() {
      return internalValueMap;
    }
    // Adapter exposing forNumber(int) through the runtime's EnumLiteMap interface.
    private static final com.google.protobuf.Internal.EnumLiteMap<
        CampaignCriterionError> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<CampaignCriterionError>() {
            public CampaignCriterionError findValueByNumber(int number) {
              return CampaignCriterionError.forNumber(number);
            }
          };
    /**
     * Returns the descriptor for this enum value.
     *
     * @throws java.lang.IllegalStateException if called on {@code UNRECOGNIZED},
     *     which has no descriptor.
     */
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    /** Descriptor of this enum type (first nested enum of the wrapper message). */
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.getDescriptor().getEnumTypes().get(0);
    }

    // Cached values() array; indexed by descriptor index below.
    private static final CampaignCriterionError[] VALUES = values();

    /**
     * Returns the enum constant for the given value descriptor; index -1 maps
     * to {@code UNRECOGNIZED}.
     *
     * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
     *     different enum type.
     */
    public static CampaignCriterionError valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // Numeric wire value backing this constant (-1 for UNRECOGNIZED).
    private final int value;

    private CampaignCriterionError(int value) {
      this.value = value;
    }
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.CampaignCriterionError)
}
  // Memoized initialization state: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  /** Always true: this message has no required fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes this message; only unknown fields can contribute bytes. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getUnknownFields().writeTo(output);
  }

  /** Returns the serialized size in bytes, memoized after the first call. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Two instances are equal iff their unknown field sets are equal (no declared fields). */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum other = (com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum) obj;

    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  /** Hash derived from the descriptor and unknown fields; memoized (consistent with equals). */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All delegate to PARSER; stream
  // variants route through GeneratedMessageV3 helpers that convert I/O errors
  // into the appropriate exception types.
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  /** Creates a new builder for this message type. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant merge when this is the shared default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Container for enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.errors.CampaignCriterionErrorEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.errors.CampaignCriterionErrorEnum)
com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_CampaignCriterionErrorEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_CampaignCriterionErrorEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.class, com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_CampaignCriterionErrorEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum build() {
com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum buildPartial() {
com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum result = new com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum) {
return mergeFrom((com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum other) {
if (other == com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.errors.CampaignCriterionErrorEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.errors.CampaignCriterionErrorEnum)
private static final com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum();
}
public static com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CampaignCriterionErrorEnum>
PARSER = new com.google.protobuf.AbstractParser<CampaignCriterionErrorEnum>() {
@java.lang.Override
public CampaignCriterionErrorEnum parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CampaignCriterionErrorEnum> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CampaignCriterionErrorEnum> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.errors.CampaignCriterionErrorEnum getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 36,227 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/errors/CampaignCriterionErrorEnum.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/errors/campaign_criterion_error.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.errors;
/**
* <pre>
* Container for enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.errors.CampaignCriterionErrorEnum}
*/
public final class CampaignCriterionErrorEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.errors.CampaignCriterionErrorEnum)
CampaignCriterionErrorEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignCriterionErrorEnum.newBuilder() to construct.
private CampaignCriterionErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CampaignCriterionErrorEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CampaignCriterionErrorEnum();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_CampaignCriterionErrorEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_CampaignCriterionErrorEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.class, com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.Builder.class);
}
/**
* <pre>
* Enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.CampaignCriterionError}
*/
public enum CampaignCriterionError
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Enum unspecified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* The received error code is not known in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Concrete type of criterion (keyword v.s. placement) is required for
* CREATE and UPDATE operations.
* </pre>
*
* <code>CONCRETE_TYPE_REQUIRED = 2;</code>
*/
CONCRETE_TYPE_REQUIRED(2),
/**
* <pre>
* Invalid placement URL.
* </pre>
*
* <code>INVALID_PLACEMENT_URL = 3;</code>
*/
INVALID_PLACEMENT_URL(3),
/**
* <pre>
* Criteria type can not be excluded for the campaign by the customer. like
* AOL account type cannot target site type criteria
* </pre>
*
* <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 4;</code>
*/
CANNOT_EXCLUDE_CRITERIA_TYPE(4),
/**
* <pre>
* Cannot set the campaign criterion status for this criteria type.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_CRITERIA_TYPE = 5;</code>
*/
CANNOT_SET_STATUS_FOR_CRITERIA_TYPE(5),
/**
* <pre>
* Cannot set the campaign criterion status for an excluded criteria.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA = 6;</code>
*/
CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA(6),
/**
* <pre>
* Cannot target and exclude the same criterion.
* </pre>
*
* <code>CANNOT_TARGET_AND_EXCLUDE = 7;</code>
*/
CANNOT_TARGET_AND_EXCLUDE(7),
/**
* <pre>
* The mutate contained too many operations.
* </pre>
*
* <code>TOO_MANY_OPERATIONS = 8;</code>
*/
TOO_MANY_OPERATIONS(8),
/**
* <pre>
* This operator cannot be applied to a criterion of this type.
* </pre>
*
* <code>OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE = 9;</code>
*/
OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE(9),
/**
* <pre>
* The Shopping campaign sales country is not supported for
* ProductSalesChannel targeting.
* </pre>
*
* <code>SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL = 10;</code>
*/
SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL(10),
/**
* <pre>
* The existing field can't be updated with CREATE operation. It can be
* updated with UPDATE operation only.
* </pre>
*
* <code>CANNOT_ADD_EXISTING_FIELD = 11;</code>
*/
CANNOT_ADD_EXISTING_FIELD(11),
/**
* <pre>
* Negative criteria are immutable, so updates are not allowed.
* </pre>
*
* <code>CANNOT_UPDATE_NEGATIVE_CRITERION = 12;</code>
*/
CANNOT_UPDATE_NEGATIVE_CRITERION(12),
/**
* <pre>
* Only free form names are allowed for negative Smart campaign keyword
* theme.
* </pre>
*
* <code>CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION = 13;</code>
*/
CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION(13),
/**
* <pre>
* Invalid Smart campaign keyword theme constant criterion.
* </pre>
*
* <code>INVALID_KEYWORD_THEME_CONSTANT = 14;</code>
*/
INVALID_KEYWORD_THEME_CONSTANT(14),
/**
* <pre>
* A Smart campaign keyword theme constant or free-form Smart campaign
* keyword theme is required.
* </pre>
*
* <code>MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME = 15;</code>
*/
MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME(15),
/**
* <pre>
* A Smart campaign may not target proximity and location criteria
* simultaneously.
* </pre>
*
* <code>CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN = 16;</code>
*/
CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN(16),
/**
* <pre>
* A Smart campaign may not target multiple proximity criteria.
* </pre>
*
* <code>CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN = 17;</code>
*/
CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN(17),
/**
* <pre>
* Location is not launched for Local Services Campaigns.
* </pre>
*
* <code>LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN = 18;</code>
*/
LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN(18),
/**
* <pre>
* A Local Services campaign may not target certain criteria types.
* </pre>
*
* <code>LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN = 19;</code>
*/
LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN(19),
/**
* <pre>
* Country locations are not supported for Local Services campaign.
* </pre>
*
* <code>CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 20;</code>
*/
CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN(20),
/**
* <pre>
* Location is not within the home country of Local Services campaign.
* </pre>
*
* <code>LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 21;</code>
*/
LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN(21),
/**
* <pre>
* Local Services profile does not exist for a particular Local Services
* campaign.
* </pre>
*
* <code>CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN = 22;</code>
*/
CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN(22),
/**
* <pre>
* Local Services campaign must have at least one target location.
* </pre>
*
* <code>AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 23;</code>
*/
AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN(23),
/**
* <pre>
* At least one positive local service ID criterion is required for a Local
* Services campaign.
* </pre>
*
* <code>AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 24;</code>
*/
AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN(24),
/**
* <pre>
* Local service ID is not found under selected categories in local
* services campaign setting.
* </pre>
*
* <code>LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY = 25;</code>
*/
LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY(25),
/**
* <pre>
* For search advertising channel, brand lists can only be applied to
* exclusive targeting, broad match campaigns for inclusive targeting or
* PMax generated campaigns.
* </pre>
*
* <code>CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN = 26;</code>
*/
CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN(26),
/**
* <pre>
* Campaigns that target all countries and territories are limited to a
* certain number of top-level location exclusions. If removing a criterion
* causes the campaign to target all countries and territories and the
* campaign has more top-level location exclusions than the limit allows,
* then this error is returned.
* </pre>
*
* <code>CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS = 27;</code>
*/
CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS(27),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Enum unspecified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* The received error code is not known in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Concrete type of criterion (keyword v.s. placement) is required for
* CREATE and UPDATE operations.
* </pre>
*
* <code>CONCRETE_TYPE_REQUIRED = 2;</code>
*/
public static final int CONCRETE_TYPE_REQUIRED_VALUE = 2;
/**
* <pre>
* Invalid placement URL.
* </pre>
*
* <code>INVALID_PLACEMENT_URL = 3;</code>
*/
public static final int INVALID_PLACEMENT_URL_VALUE = 3;
/**
* <pre>
* Criteria type can not be excluded for the campaign by the customer. like
* AOL account type cannot target site type criteria
* </pre>
*
* <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 4;</code>
*/
public static final int CANNOT_EXCLUDE_CRITERIA_TYPE_VALUE = 4;
/**
* <pre>
* Cannot set the campaign criterion status for this criteria type.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_CRITERIA_TYPE = 5;</code>
*/
public static final int CANNOT_SET_STATUS_FOR_CRITERIA_TYPE_VALUE = 5;
/**
* <pre>
* Cannot set the campaign criterion status for an excluded criteria.
* </pre>
*
* <code>CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA = 6;</code>
*/
public static final int CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA_VALUE = 6;
/**
* <pre>
* Cannot target and exclude the same criterion.
* </pre>
*
* <code>CANNOT_TARGET_AND_EXCLUDE = 7;</code>
*/
public static final int CANNOT_TARGET_AND_EXCLUDE_VALUE = 7;
/**
* <pre>
* The mutate contained too many operations.
* </pre>
*
* <code>TOO_MANY_OPERATIONS = 8;</code>
*/
public static final int TOO_MANY_OPERATIONS_VALUE = 8;
/**
* <pre>
* This operator cannot be applied to a criterion of this type.
* </pre>
*
* <code>OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE = 9;</code>
*/
public static final int OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 9;
/**
* <pre>
* The Shopping campaign sales country is not supported for
* ProductSalesChannel targeting.
* </pre>
*
* <code>SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL = 10;</code>
*/
public static final int SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL_VALUE = 10;
/**
* <pre>
* The existing field can't be updated with CREATE operation. It can be
* updated with UPDATE operation only.
* </pre>
*
* <code>CANNOT_ADD_EXISTING_FIELD = 11;</code>
*/
public static final int CANNOT_ADD_EXISTING_FIELD_VALUE = 11;
/**
* <pre>
* Negative criteria are immutable, so updates are not allowed.
* </pre>
*
* <code>CANNOT_UPDATE_NEGATIVE_CRITERION = 12;</code>
*/
public static final int CANNOT_UPDATE_NEGATIVE_CRITERION_VALUE = 12;
/**
* <pre>
* Only free form names are allowed for negative Smart campaign keyword
* theme.
* </pre>
*
* <code>CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION = 13;</code>
*/
public static final int CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION_VALUE = 13;
/**
* <pre>
* Invalid Smart campaign keyword theme constant criterion.
* </pre>
*
* <code>INVALID_KEYWORD_THEME_CONSTANT = 14;</code>
*/
public static final int INVALID_KEYWORD_THEME_CONSTANT_VALUE = 14;
/**
* <pre>
* A Smart campaign keyword theme constant or free-form Smart campaign
* keyword theme is required.
* </pre>
*
* <code>MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME = 15;</code>
*/
public static final int MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME_VALUE = 15;
/**
* <pre>
* A Smart campaign may not target proximity and location criteria
* simultaneously.
* </pre>
*
* <code>CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN = 16;</code>
*/
public static final int CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN_VALUE = 16;
/**
* <pre>
* A Smart campaign may not target multiple proximity criteria.
* </pre>
*
* <code>CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN = 17;</code>
*/
public static final int CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN_VALUE = 17;
/**
* <pre>
* Location is not launched for Local Services Campaigns.
* </pre>
*
* <code>LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN = 18;</code>
*/
public static final int LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 18;
/**
* <pre>
* A Local Services campaign may not target certain criteria types.
* </pre>
*
* <code>LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN = 19;</code>
*/
public static final int LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 19;
/**
* <pre>
* Country locations are not supported for Local Services campaign.
* </pre>
*
* <code>CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 20;</code>
*/
public static final int CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 20;
/**
* <pre>
* Location is not within the home country of Local Services campaign.
* </pre>
*
* <code>LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN = 21;</code>
*/
public static final int LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 21;
/**
* <pre>
* Local Services profile does not exist for a particular Local Services
* campaign.
* </pre>
*
* <code>CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN = 22;</code>
*/
public static final int CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 22;
/**
* <pre>
* Local Services campaign must have at least one target location.
* </pre>
*
* <code>AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 23;</code>
*/
public static final int AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 23;
/**
* <pre>
* At least one positive local service ID criterion is required for a Local
* Services campaign.
* </pre>
*
* <code>AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN = 24;</code>
*/
public static final int AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN_VALUE = 24;
/**
* <pre>
* Local service ID is not found under selected categories in local
* services campaign setting.
* </pre>
*
* <code>LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY = 25;</code>
*/
public static final int LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY_VALUE = 25;
/**
* <pre>
* For search advertising channel, brand lists can only be applied to
* exclusive targeting, broad match campaigns for inclusive targeting or
* PMax generated campaigns.
* </pre>
*
* <code>CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN = 26;</code>
*/
public static final int CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN_VALUE = 26;
/**
* <pre>
* Campaigns that target all countries and territories are limited to a
* certain number of top-level location exclusions. If removing a criterion
* causes the campaign to target all countries and territories and the
* campaign has more top-level location exclusions than the limit allows,
* then this error is returned.
* </pre>
*
* <code>CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS = 27;</code>
*/
public static final int CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS_VALUE = 27;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static CampaignCriterionError valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static CampaignCriterionError forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return CONCRETE_TYPE_REQUIRED;
case 3: return INVALID_PLACEMENT_URL;
case 4: return CANNOT_EXCLUDE_CRITERIA_TYPE;
case 5: return CANNOT_SET_STATUS_FOR_CRITERIA_TYPE;
case 6: return CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA;
case 7: return CANNOT_TARGET_AND_EXCLUDE;
case 8: return TOO_MANY_OPERATIONS;
case 9: return OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE;
case 10: return SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL;
case 11: return CANNOT_ADD_EXISTING_FIELD;
case 12: return CANNOT_UPDATE_NEGATIVE_CRITERION;
case 13: return CANNOT_SET_NEGATIVE_KEYWORD_THEME_CONSTANT_CRITERION;
case 14: return INVALID_KEYWORD_THEME_CONSTANT;
case 15: return MISSING_KEYWORD_THEME_CONSTANT_OR_FREE_FORM_KEYWORD_THEME;
case 16: return CANNOT_TARGET_BOTH_PROXIMITY_AND_LOCATION_CRITERIA_FOR_SMART_CAMPAIGN;
case 17: return CANNOT_TARGET_MULTIPLE_PROXIMITY_CRITERIA_FOR_SMART_CAMPAIGN;
case 18: return LOCATION_NOT_LAUNCHED_FOR_LOCAL_SERVICES_CAMPAIGN;
case 19: return LOCATION_INVALID_FOR_LOCAL_SERVICES_CAMPAIGN;
case 20: return CANNOT_TARGET_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN;
case 21: return LOCATION_NOT_IN_HOME_COUNTRY_FOR_LOCAL_SERVICES_CAMPAIGN;
case 22: return CANNOT_ADD_OR_REMOVE_LOCATION_FOR_LOCAL_SERVICES_CAMPAIGN;
case 23: return AT_LEAST_ONE_POSITIVE_LOCATION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN;
case 24: return AT_LEAST_ONE_LOCAL_SERVICE_ID_CRITERION_REQUIRED_FOR_LOCAL_SERVICES_CAMPAIGN;
case 25: return LOCAL_SERVICE_ID_NOT_FOUND_FOR_CATEGORY;
case 26: return CANNOT_ATTACH_BRAND_LIST_TO_NON_QUALIFIED_SEARCH_CAMPAIGN;
case 27: return CANNOT_REMOVE_ALL_LOCATIONS_DUE_TO_TOO_MANY_COUNTRY_EXCLUSIONS;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<CampaignCriterionError>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
CampaignCriterionError> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<CampaignCriterionError>() {
public CampaignCriterionError findValueByNumber(int number) {
return CampaignCriterionError.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.getDescriptor().getEnumTypes().get(0);
}
private static final CampaignCriterionError[] VALUES = values();
public static CampaignCriterionError valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private CampaignCriterionError(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.CampaignCriterionError)
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum other = (com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Container for enum describing possible campaign criterion errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.errors.CampaignCriterionErrorEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.errors.CampaignCriterionErrorEnum)
com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_CampaignCriterionErrorEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_CampaignCriterionErrorEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.class, com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.errors.CampaignCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_CampaignCriterionErrorEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum build() {
com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum buildPartial() {
com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum result = new com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
    // The following overrides delegate straight to GeneratedMessageV3.Builder; they are
    // regenerated here only to narrow the return type to this Builder for call chaining.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: routes same-type messages to the typed overload below.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum) {
        return mergeFrom((com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // This message has no declared fields, so merging only copies unknown fields.
    public Builder mergeFrom(com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum other) {
      if (other == com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Proto3 message with no required fields: always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire parsing loop. This message declares no fields, so every non-zero tag is
    // preserved via parseUnknownField; tag 0 (or an end-group tag) terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure so partial state is observed.
        onChanged();
      } // finally
      return this;
    }
    // Unknown-field passthroughs; narrowed return type for chaining.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.errors.CampaignCriterionErrorEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.errors.CampaignCriterionErrorEnum)
  // Shared immutable default instance; also serves as the identity value for merges.
  private static final com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum();
  }
  public static com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser used by the runtime's parseFrom entry points. On any failure it
  // attaches the partially built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CampaignCriterionErrorEnum>
      PARSER = new com.google.protobuf.AbstractParser<CampaignCriterionErrorEnum>() {
    @java.lang.Override
    public CampaignCriterionErrorEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O errors in the protobuf exception type expected by callers.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  // Static and instance accessors both expose the single shared PARSER above.
  public static com.google.protobuf.Parser<CampaignCriterionErrorEnum> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CampaignCriterionErrorEnum> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.errors.CampaignCriterionErrorEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary (concatenated sources): ====
// ==== java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/ListApisRequest.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
*
*
* <pre>
* Request message for ListApis.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApisRequest}
*/
public final class ListApisRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.ListApisRequest)
ListApisRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListApisRequest.newBuilder() to construct.
  private ListApisRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor used for the shared default instance; string fields start empty.
  private ListApisRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }
  // Runtime hook for creating fresh instances reflectively (e.g. during deserialization).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListApisRequest();
  }
  // Descriptor plumbing generated from registry_service.proto; links this class and
  // its Builder to the reflective field accessor table.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_ListApisRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_ListApisRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.apigeeregistry.v1.ListApisRequest.class,
            com.google.cloud.apigeeregistry.v1.ListApisRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; after wire parsing it is a ByteString
  // and is swapped to a cached String on first getParent() call (and vice versa).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent, which owns this collection of APIs.
   * Format: `projects/*/locations/*`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent, which owns this collection of APIs.
   * Format: `projects/*/locations/*`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  // 0 means "unset" for proto3 scalars; server applies its own default/cap per the docs below.
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * The maximum number of APIs to return.
   * The service may return fewer than this value.
   * If unspecified, at most 50 values will be returned.
   * The maximum is 1000; values above 1000 will be coerced to 1000.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  // Same lazy String/ByteString caching scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListApis` call.
   * Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to `ListApis` must match
   * the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListApis` call.
   * Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to `ListApis` must match
   * the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 4;
  // Same lazy String/ByteString caching scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * An expression that can be used to filter the list. Filters use the Common
   * Expression Language and can refer to all message fields.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * An expression that can be used to filter the list. Filters use the Common
   * Expression Language and can refer to all message fields.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 unknown, 0 false, 1 true. Proto3 with no required fields
  // means the computed answer is always true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only non-default fields (proto3 presence rule), in field-number order.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and caches the serialized byte size; -1 in memoizedSize means "not computed".
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all four declared fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.apigeeregistry.v1.ListApisRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.apigeeregistry.v1.ListApisRequest other =
        (com.google.cloud.apigeeregistry.v1.ListApisRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed") and folds in each field number
  // and value, consistent with equals() above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to the shared PARSER via
  // the GeneratedMessageV3 helpers, differing only in input type and registry handling.
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Starts a builder pre-populated from an existing message.
  public static Builder newBuilder(com.google.cloud.apigeeregistry.v1.ListApisRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // The default instance yields an empty builder; any other instance seeds the builder with its state.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for ListApis.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApisRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.ListApisRequest)
com.google.cloud.apigeeregistry.v1.ListApisRequestOrBuilder {
    // Same descriptor/accessor-table plumbing as the outer message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApisRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApisRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.apigeeregistry.v1.ListApisRequest.class,
              com.google.cloud.apigeeregistry.v1.ListApisRequest.Builder.class);
    }
    // Construct using com.google.cloud.apigeeregistry.v1.ListApisRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all four fields to proto3 defaults and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApisRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApisRequest getDefaultInstanceForType() {
      return com.google.cloud.apigeeregistry.v1.ListApisRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApisRequest build() {
      com.google.cloud.apigeeregistry.v1.ListApisRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApisRequest buildPartial() {
      com.google.cloud.apigeeregistry.v1.ListApisRequest result =
          new com.google.cloud.apigeeregistry.v1.ListApisRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose has-bits are set; unset fields keep the
    // defaults assigned in the message constructor.
    private void buildPartial0(com.google.cloud.apigeeregistry.v1.ListApisRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
    // Delegating overrides below exist only to narrow the return type for chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.apigeeregistry.v1.ListApisRequest) {
        return mergeFrom((com.google.cloud.apigeeregistry.v1.ListApisRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge: only non-default (non-empty / non-zero) fields of `other` overwrite.
    public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.ListApisRequest other) {
      if (other == com.google.cloud.apigeeregistry.v1.ListApisRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire parsing loop. Tags 10/16/26/34 are fields 1-4 with their wire types
    // (length-delimited strings and a varint); anything else goes to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Per-field has-bits: 0x1 parent, 0x2 pageSize, 0x4 pageToken, 0x8 filter.
    private int bitField0_;
    // Lazily swapped between String and ByteString, like the outer message field.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of APIs.
     * Format: `projects/*/locations/*`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of APIs.
     * Format: `projects/*/locations/*`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of APIs.
     * Format: `projects/*/locations/*`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of APIs.
     * Format: `projects/*/locations/*`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of APIs.
     * Format: `projects/*/locations/*`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of APIs to return.
     * The service may return fewer than this value.
     * If unspecified, at most 50 values will be returned.
     * The maximum is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of APIs to return.
     * The service may return fewer than this value.
     * If unspecified, at most 50 values will be returned.
     * The maximum is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of APIs to return.
     * The service may return fewer than this value.
     * If unspecified, at most 50 values will be returned.
     * The maximum is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    // Lazily swapped between String and ByteString, like the outer message field.
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListApis` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListApis` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListApis` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListApis` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListApis` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListApis` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListApis` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListApis` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListApis` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListApis` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Lazily swapped between String and ByteString, like the outer message field.
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * An expression that can be used to filter the list. Filters use the Common
     * Expression Language and can refer to all message fields.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * An expression that can be used to filter the list. Filters use the Common
     * Expression Language and can refer to all message fields.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * An expression that can be used to filter the list. Filters use the Common
     * Expression Language and can refer to all message fields.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * An expression that can be used to filter the list. Filters use the Common
     * Expression Language and can refer to all message fields.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * An expression that can be used to filter the list. Filters use the Common
     * Expression Language and can refer to all message fields.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.ListApisRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.ListApisRequest)
private static final com.google.cloud.apigeeregistry.v1.ListApisRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.ListApisRequest();
}
public static com.google.cloud.apigeeregistry.v1.ListApisRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListApisRequest> PARSER =
new com.google.protobuf.AbstractParser<ListApisRequest>() {
@java.lang.Override
public ListApisRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListApisRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListApisRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListApisRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,109 | java-resourcemanager/proto-google-cloud-resourcemanager-v3/src/main/java/com/google/cloud/resourcemanager/v3/SearchOrganizationsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/resourcemanager/v3/organizations.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.resourcemanager.v3;
/**
*
*
* <pre>
* The request sent to the `SearchOrganizations` method.
* </pre>
*
* Protobuf type {@code google.cloud.resourcemanager.v3.SearchOrganizationsRequest}
*/
public final class SearchOrganizationsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.resourcemanager.v3.SearchOrganizationsRequest)
SearchOrganizationsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchOrganizationsRequest.newBuilder() to construct.
private SearchOrganizationsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchOrganizationsRequest() {
pageToken_ = "";
query_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchOrganizationsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.resourcemanager.v3.OrganizationsProto
.internal_static_google_cloud_resourcemanager_v3_SearchOrganizationsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.resourcemanager.v3.OrganizationsProto
.internal_static_google_cloud_resourcemanager_v3_SearchOrganizationsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest.class,
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest.Builder.class);
}
public static final int PAGE_SIZE_FIELD_NUMBER = 1;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of organizations to return in the response.
* The server can return fewer organizations than requested. If unspecified,
* server picks an appropriate default.
* </pre>
*
* <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to
* `SearchOrganizations` that indicates from where listing should continue.
* </pre>
*
* <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to
* `SearchOrganizations` that indicates from where listing should continue.
* </pre>
*
* <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int QUERY_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object query_ = "";
/**
*
*
* <pre>
* Optional. An optional query string used to filter the Organizations to
* return in the response. Query rules are case-insensitive.
*
*
* ```
* | Field | Description |
* |------------------|--------------------------------------------|
* | directoryCustomerId, owner.directoryCustomerId | Filters by directory
* customer id. |
* | domain | Filters by domain. |
* ```
*
* Organizations may be queried by `directoryCustomerId` or by
* `domain`, where the domain is a G Suite domain, for example:
*
* * Query `directorycustomerid:123456789` returns Organization
* resources with `owner.directory_customer_id` equal to `123456789`.
* * Query `domain:google.com` returns Organization resources corresponding
* to the domain `google.com`.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The query.
*/
@java.lang.Override
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. An optional query string used to filter the Organizations to
* return in the response. Query rules are case-insensitive.
*
*
* ```
* | Field | Description |
* |------------------|--------------------------------------------|
* | directoryCustomerId, owner.directoryCustomerId | Filters by directory
* customer id. |
* | domain | Filters by domain. |
* ```
*
* Organizations may be queried by `directoryCustomerId` or by
* `domain`, where the domain is a G Suite domain, for example:
*
* * Query `directorycustomerid:123456789` returns Organization
* resources with `owner.directory_customer_id` equal to `123456789`.
* * Query `domain:google.com` returns Organization resources corresponding
* to the domain `google.com`.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for query.
*/
@java.lang.Override
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (pageSize_ != 0) {
output.writeInt32(1, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, query_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, query_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest)) {
return super.equals(obj);
}
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest other =
(com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest) obj;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getQuery().equals(other.getQuery())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + QUERY_FIELD_NUMBER;
hash = (53 * hash) + getQuery().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request sent to the `SearchOrganizations` method.
* </pre>
*
* Protobuf type {@code google.cloud.resourcemanager.v3.SearchOrganizationsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.resourcemanager.v3.SearchOrganizationsRequest)
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.resourcemanager.v3.OrganizationsProto
.internal_static_google_cloud_resourcemanager_v3_SearchOrganizationsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.resourcemanager.v3.OrganizationsProto
.internal_static_google_cloud_resourcemanager_v3_SearchOrganizationsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest.class,
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest.Builder.class);
}
// Construct using com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
pageSize_ = 0;
pageToken_ = "";
query_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.resourcemanager.v3.OrganizationsProto
.internal_static_google_cloud_resourcemanager_v3_SearchOrganizationsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest
getDefaultInstanceForType() {
return com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest build() {
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest buildPartial() {
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest result =
new com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.query_ = query_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest) {
return mergeFrom((com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest other) {
if (other
== com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest.getDefaultInstance())
return this;
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getQuery().isEmpty()) {
query_ = other.query_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
query_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int pageSize_;
/**
*
*
* <pre>
* Optional. The maximum number of organizations to return in the response.
* The server can return fewer organizations than requested. If unspecified,
* server picks an appropriate default.
* </pre>
*
* <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. The maximum number of organizations to return in the response.
* The server can return fewer organizations than requested. If unspecified,
* server picks an appropriate default.
* </pre>
*
* <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The maximum number of organizations to return in the response.
* The server can return fewer organizations than requested. If unspecified,
* server picks an appropriate default.
* </pre>
*
* <code>int32 page_size = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000001);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to
* `SearchOrganizations` that indicates from where listing should continue.
* </pre>
*
* <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to
* `SearchOrganizations` that indicates from where listing should continue.
* </pre>
*
* <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to
* `SearchOrganizations` that indicates from where listing should continue.
* </pre>
*
* <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to
* `SearchOrganizations` that indicates from where listing should continue.
* </pre>
*
* <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous call to
* `SearchOrganizations` that indicates from where listing should continue.
* </pre>
*
* <code>string page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object query_ = "";
/**
*
*
* <pre>
* Optional. An optional query string used to filter the Organizations to
* return in the response. Query rules are case-insensitive.
*
*
* ```
* | Field | Description |
* |------------------|--------------------------------------------|
* | directoryCustomerId, owner.directoryCustomerId | Filters by directory
* customer id. |
* | domain | Filters by domain. |
* ```
*
* Organizations may be queried by `directoryCustomerId` or by
* `domain`, where the domain is a G Suite domain, for example:
*
* * Query `directorycustomerid:123456789` returns Organization
* resources with `owner.directory_customer_id` equal to `123456789`.
* * Query `domain:google.com` returns Organization resources corresponding
* to the domain `google.com`.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The query.
*/
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional query string used to filter the Organizations to
* return in the response. Query rules are case-insensitive.
*
*
* ```
* | Field | Description |
* |------------------|--------------------------------------------|
* | directoryCustomerId, owner.directoryCustomerId | Filters by directory
* customer id. |
* | domain | Filters by domain. |
* ```
*
* Organizations may be queried by `directoryCustomerId` or by
* `domain`, where the domain is a G Suite domain, for example:
*
* * Query `directorycustomerid:123456789` returns Organization
* resources with `owner.directory_customer_id` equal to `123456789`.
* * Query `domain:google.com` returns Organization resources corresponding
* to the domain `google.com`.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for query.
*/
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional query string used to filter the Organizations to
* return in the response. Query rules are case-insensitive.
*
*
* ```
* | Field | Description |
* |------------------|--------------------------------------------|
* | directoryCustomerId, owner.directoryCustomerId | Filters by directory
* customer id. |
* | domain | Filters by domain. |
* ```
*
* Organizations may be queried by `directoryCustomerId` or by
* `domain`, where the domain is a G Suite domain, for example:
*
* * Query `directorycustomerid:123456789` returns Organization
* resources with `owner.directory_customer_id` equal to `123456789`.
* * Query `domain:google.com` returns Organization resources corresponding
* to the domain `google.com`.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The query to set.
* @return This builder for chaining.
*/
public Builder setQuery(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
query_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional query string used to filter the Organizations to
* return in the response. Query rules are case-insensitive.
*
*
* ```
* | Field | Description |
* |------------------|--------------------------------------------|
* | directoryCustomerId, owner.directoryCustomerId | Filters by directory
* customer id. |
* | domain | Filters by domain. |
* ```
*
* Organizations may be queried by `directoryCustomerId` or by
* `domain`, where the domain is a G Suite domain, for example:
*
* * Query `directorycustomerid:123456789` returns Organization
* resources with `owner.directory_customer_id` equal to `123456789`.
* * Query `domain:google.com` returns Organization resources corresponding
* to the domain `google.com`.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearQuery() {
query_ = getDefaultInstance().getQuery();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional query string used to filter the Organizations to
* return in the response. Query rules are case-insensitive.
*
*
* ```
* | Field | Description |
* |------------------|--------------------------------------------|
* | directoryCustomerId, owner.directoryCustomerId | Filters by directory
* customer id. |
* | domain | Filters by domain. |
* ```
*
* Organizations may be queried by `directoryCustomerId` or by
* `domain`, where the domain is a G Suite domain, for example:
*
* * Query `directorycustomerid:123456789` returns Organization
* resources with `owner.directory_customer_id` equal to `123456789`.
* * Query `domain:google.com` returns Organization resources corresponding
* to the domain `google.com`.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for query to set.
* @return This builder for chaining.
*/
public Builder setQueryBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
query_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.resourcemanager.v3.SearchOrganizationsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.resourcemanager.v3.SearchOrganizationsRequest)
// Immutable singleton; all unset message-typed fields resolve to it.
private static final com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest();
}

public static com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser singleton backing the parseFrom(...) entry points.  It merges the
// wire bytes into a fresh Builder and returns buildPartial(), attaching the
// partially parsed message to any InvalidProtocolBufferException thrown.
private static final com.google.protobuf.Parser<SearchOrganizationsRequest> PARSER =
    new com.google.protobuf.AbstractParser<SearchOrganizationsRequest>() {
      @java.lang.Override
      public SearchOrganizationsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors for the PARSER and default-instance
// singletons required by the MessageLite contract.
public static com.google.protobuf.Parser<SearchOrganizationsRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<SearchOrganizationsRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.resourcemanager.v3.SearchOrganizationsRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googlearchive/caja | 36,120 | src/com/google/caja/parser/html/Nodes.java | // Copyright (C) 2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.caja.parser.html;
import com.google.caja.lexer.FilePosition;
import com.google.caja.lexer.HtmlEntities;
import com.google.caja.lexer.HtmlTextEscapingMode;
import com.google.caja.lexer.HtmlTokenType;
import com.google.caja.lexer.InputSource;
import com.google.caja.lexer.TokenConsumer;
import com.google.caja.lexer.escaping.Escaping;
import com.google.caja.render.Concatenator;
import com.google.caja.reporting.MarkupRenderMode;
import com.google.caja.reporting.RenderContext;
import com.google.caja.util.SparseBitSet;
import com.google.caja.util.Strings;
import com.google.common.collect.ImmutableSet;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Set;
import javax.annotation.Nullable;
import org.w3c.dom.Attr;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
import org.w3c.dom.UserDataHandler;
/**
* Utilities for dealing with HTML/XML DOM trees.
*
* WARNING: The renderUnsafe methods in this class are unsafe for cajoled
* code because the Caja pipeline does not sanitize comments. In particular
* IE comments rendered by renderUnsafe will be executable.
*
* @author mikesamuel@gmail.com
*/
public class Nodes {
private static final String FP_KEY = "caja:filePosition";
private static final String RAW_TEXT_KEY = "caja:rawHtml";
private static final String HAS_XMLNS_DECLARATION_KEY = "caja:hasXmlns";
/**
* Used to copy the has-xmlns-declaration bit from an element to elements
* cloned/imported from it.
*/
private static final UserDataHandler HAS_XMLNS_DECLARATION_DATA_HANDLER
    = new UserDataHandler() {
      // Called back by the DOM implementation on user-data-carrying nodes;
      // re-applies the marker bit to the clone/import so the flag survives
      // Node.cloneNode and Document.importNode.
      public void handle(
          short operation, String key, Object data, Node src, Node dest) {
        switch (operation) {
          case UserDataHandler.NODE_CLONED:
          case UserDataHandler.NODE_IMPORTED:
            dest.setUserData(HAS_XMLNS_DECLARATION_KEY, Boolean.TRUE, this);
            break;
        }
      }
    };
/** A left to right {@link Iterable} over the children of the given node. */
public static Iterable<? extends Node> childrenOf(final Node n) {
  return new Iterable<Node>() {
    public Iterator<Node> iterator() {
      return new Iterator<Node>() {
        // Next node to hand out; null once the sibling chain is exhausted.
        private Node pending = n.getFirstChild();
        public boolean hasNext() { return pending != null; }
        public Node next() {
          Node current = pending;
          if (current == null) { throw new NoSuchElementException(); }
          pending = current.getNextSibling();
          return current;
        }
        public void remove() { throw new UnsupportedOperationException(); }
      };
    }
  };
}
/** An {@link Iterable} over the attributes of the given element. */
public static Iterable<? extends Attr> attributesOf(final Element el) {
  return new Iterable<Attr>() {
    public Iterator<Attr> iterator() {
      // The DOM spec says elements with no attributes may return null,
      // though Xerces returns an empty map; treat both as "no attributes".
      final NamedNodeMap attrMap = el.getAttributes();
      final int count = attrMap != null ? attrMap.getLength() : 0;
      return new Iterator<Attr>() {
        private int index = 0;
        public boolean hasNext() { return index < count; }
        public Attr next() {
          if (index >= count) { throw new NoSuchElementException(); }
          return (Attr) attrMap.item(index++);
        }
        public void remove() { throw new UnsupportedOperationException(); }
      };
    }
  };
}
/**
 * An {@link Iterable} over the elements of the given node list.
 * @throws ClassCastException if a member is fetched that is not an instance
 *     of outType.
 */
public static <T extends Node> Iterable<T> nodeListIterable(
    final NodeList nl, final Class<? extends T> outType) {
  return new Iterable<T>() {
    public Iterator<T> iterator() {
      // Length is sampled per iterator, matching the original semantics for
      // live NodeLists.
      final int count = nl != null ? nl.getLength() : 0;
      return new Iterator<T>() {
        private int index = 0;
        public boolean hasNext() { return index < count; }
        public T next() {
          if (index >= count) { throw new NoSuchElementException(); }
          return outType.cast(nl.item(index++));
        }
        public void remove() { throw new UnsupportedOperationException(); }
      };
    }
  };
}
// Sentinel position for nodes that never came from a parsed file.
private static final FilePosition UNKNOWN_START_OF_FILE
    = FilePosition.startOfFile(InputSource.UNKNOWN);

/**
 * Returns the last file position associated with the given node by
 * {@link #setFilePositionFor} or related methods.
 */
public static FilePosition getFilePositionFor(Node node) {
  FilePosition pos = (FilePosition) node.getUserData(FP_KEY);
  if (pos != null) { return pos; }
  return UNKNOWN_START_OF_FILE;
}

/** The file position of the value of the given attribute. */
public static FilePosition getFilePositionForValue(Attr a) {
  // The position of an attribute's value is stored on its text child.
  return getFilePositionFor(a.getFirstChild());
}

/** @see #getFilePositionFor */
public static void setFilePositionFor(Node node, FilePosition pos) {
  node.setUserData(FP_KEY, pos, null);
}

/** @see #getFilePositionForValue */
public static void setFilePositionForValue(Attr a, FilePosition pos) {
  setFilePositionFor(a.getFirstChild(), pos);
}
/**
 * @see #hasXmlnsDeclaration(Element)
 */
public static void markAsHavingXmlnsDeclaration(Element el) {
  // The data handler re-applies the marker when the element is cloned or
  // imported, so the bit survives DOM copies.
  el.setUserData(
      HAS_XMLNS_DECLARATION_KEY, Boolean.TRUE,
      HAS_XMLNS_DECLARATION_DATA_HANDLER);
}

/**
 * True for elements that had an {@code xmlns="<namespace-uri>"} declaration
 * when parsed so that the renderer can include one where it is safe to do so.
 */
public static boolean hasXmlnsDeclaration(Element el) {
  return Boolean.TRUE.equals(el.getUserData(HAS_XMLNS_DECLARATION_KEY));
}

/**
 * Associates the HTML textual value as parsed with the given attribute.
 * If a client may have called {@link Node#setNodeValue(String)} or an alias
 * since parsing, the output should not be trusted.
 */
public static void setRawValue(Attr a, String html) {
  // An attribute's value text lives in its first (Text) child node.
  setRawText((Text) a.getFirstChild(), html);
}

/** @see #setRawValue */
public static String getRawValue(Attr a) {
  return getRawText((Text) a.getFirstChild());
}

/**
 * Associates the HTML textual value as parsed with the given node.
 * If a client may have called {@link Node#setNodeValue(String)} or an alias
 * since parsing, the output should not be trusted.
 */
public static void setRawText(Text cd, String html) {
  cd.setUserData(RAW_TEXT_KEY, html, null);
}

/** @see #setRawText Returns null when no raw text was recorded. */
public static String getRawText(Text cd) {
  return (String) cd.getUserData(RAW_TEXT_KEY);
}
/**
 * Replace entity references in HTML CDATA with their plain text equivalents.
 */
public static String decode(String html) {
  // Fast path: no ampersand means no entities to decode.
  if (html.indexOf('&') < 0) { return html; }
  char[] chars = html.toCharArray();
  // Number of chars by which the decoded text is shorter than the input so
  // far; output is compacted in place at index (i - delta).
  int delta = 0;
  int n = chars.length;
  for (int i = 0; i < n;) {
    char ch = chars[i];
    if (chars[i] == '&') {
      // Packs (end index << 32) | codepoint; end == i + 1 signals that no
      // valid entity starts at i.
      long packedEndAndCodepoint = HtmlEntities.decodeEntityAt(chars, i, n);
      int end = (int) (packedEndAndCodepoint >>> 32);
      if (end != i + 1) {
        int codepoint = ((int) packedEndAndCodepoint) & 0xffffff;
        // toChars writes 1 or 2 chars at (i - delta) and returns the count;
        // the entity consumed (end - i) input chars, so the text shrinks by
        // the difference.
        delta += end - (i + Character.toChars(codepoint, chars, i - delta));
        i = end;
      } else {
        chars[i - delta] = ch;
        ++i;
      }
    } else {
      chars[i - delta] = ch;
      ++i;
    }
  }
  if (delta == 0) { return html; }
  return String.valueOf(chars, 0, n - delta);
}
/**
 * Convert HTML to plain text by replacing HTML special characters with HTML
 * entities.
 */
public static String encode(String raw) {
  // Pre-size assuming modest expansion from escaping.
  StringBuilder sb = new StringBuilder((raw.length() * 3) / 2);
  Escaping.escapeXml(raw, false, sb);
  return sb.toString();
}
/**
 * Serializes the given DOM node to HTML or XML.
 * @param rc a context where the token consumer is typically a
 * {@link Concatenator}, and the {@link RenderContext#asXml} is significant.
 */
public static void render(Node node, Namespaces ns, RenderContext rc) {
  render(node, ns, rc, false);
}

/**
 * @deprecated For use only by non-caja clients of the parser/render
 */
@Deprecated
public static void renderUnsafe(DocumentType docType, Node node,
    Namespaces ns, RenderContext rc) {
  render(docType, node, ns, rc, true);
}

public static void render(DocumentType docType, Node node, Namespaces ns,
    RenderContext rc) {
  render(docType, node, ns, rc, false);
}

/**
 * @deprecated For use only by non-caja clients of the parser/render
 */
@Deprecated
public static void renderUnsafe(Node node, Namespaces ns, RenderContext rc) {
  render(node, ns, rc, true);
}

// Delegates to the docType-aware variant with no document type.
private static void render(Node node, Namespaces ns,
    RenderContext rc, boolean renderUnsafe) {
  render(null, node, ns, rc, renderUnsafe);
}

// Workhorse: renders an optional doctype plus the node subtree into a
// buffer, then emits the buffer through the context's token consumer with
// the node's file position marked around it.
private static void render(
    DocumentType docType, Node node, Namespaces ns, RenderContext rc,
    boolean renderUnsafe) {
  StringBuilder sb = new StringBuilder(1 << 18);
  if (null != docType) {
    String rendering = renderDocumentType(docType);
    if (null != rendering) {
      sb.append(rendering);
    }
  }
  new Renderer(rc, sb, rc.markupRenderMode(), ns)
      .render(node, ns, renderUnsafe);
  TokenConsumer out = rc.getOut();
  FilePosition pos = getFilePositionFor(node);
  out.mark(FilePosition.startOf(pos));
  out.consume(sb.toString());
  out.mark(FilePosition.endOf(pos));
}
/**
 * Returns a rendering of document type. This is handled explicitly here
 * rather than in {@link Nodes#render(Node, MarkupRenderMode)} to avoid
 * rendering a document type in the middle of a document.
 *
 * @return null if nothing to render or docType is invalid.
 */
private static @Nullable String renderDocumentType(DocumentType docType) {
  String publicId = docType.getPublicId();
  String systemId = docType.getSystemId();
  String nodeName;
  // The Name in the document type declaration must match the element type
  // of the root element, so a reachable document element is required.
  if (null != docType.getOwnerDocument() &&
      null != docType.getOwnerDocument().getDocumentElement() &&
      null != docType.getOwnerDocument().getDocumentElement().getNodeName()) {
    nodeName = docType.getOwnerDocument()
        .getDocumentElement()
        .getNodeName();
  } else {
    return null;
  }
  if (!DoctypeMaker.isHtml(nodeName, publicId, systemId)) {
    return null;
  }
  boolean hasPublicId = null != publicId && publicId.length() > 0;
  StringBuilder sb = new StringBuilder();
  sb.append("<!DOCTYPE ").append(nodeName);
  if (hasPublicId) {
    sb.append(" PUBLIC ")
        .append('"')
        .append(publicId.replaceAll("\"", "%22"))
        .append('"');
  }
  if (null != systemId && systemId.length() > 0) {
    // Sanity check - system ids should parse as absolute http(s) URIs.
    try {
      URI u = new URI(systemId);
      if (u.isAbsolute() &&
          ("http".equals(u.getScheme()) || "https".equals(u.getScheme()))) {
        // Fix: a system identifier without a public identifier must be
        // introduced by the SYSTEM keyword; the previous code emitted
        // `<!DOCTYPE html "..."` which is not valid doctype syntax.  With a
        // public id, the system id follows it directly (PUBLIC "p" "s").
        sb.append(hasPublicId ? " " : " SYSTEM ")
            .append('"')
            .append(systemId.replaceAll("\"", "%22"))
            .append('"');
      }
    } catch (URISyntaxException e) {
      return null;
    }
  }
  sb.append(">");
  return sb.toString();
}
/**
 * Serializes the given DOM node to HTML or XML.
 * @param rc a context where the token consumer is typically a
 * {@link Concatenator}, and the {@link RenderContext#asXml} is significant.
 */
public static void render(Node node, RenderContext rc) {
  render(node, rc, false);
}

/**
 * @deprecated For use only by non-caja clients of the parser/render
 */
@Deprecated
public static void renderUnsafe(Node node, RenderContext rc) {
  render(node, Namespaces.HTML_DEFAULT, rc, true);
}

// Delegates using the default HTML namespace scope.
private static void render(Node node, RenderContext rc,
    boolean renderUnsafe) {
  render(node, Namespaces.HTML_DEFAULT, rc, renderUnsafe);
}

/** Renders the node as HTML and returns the markup as a string. */
public static String render(Node node) {
  return render(node, false);
}

/**
 * @deprecated For use only by non-caja clients of the parser/render
 */
@Deprecated
public static String renderUnsafe(Node node) {
  return renderUnsafe(node, MarkupRenderMode.HTML);
}

// Deprecated in favor of the MarkupRenderMode overload below.
@Deprecated
public static String render(Node node, boolean asXml) {
  return render(node, asXml ? MarkupRenderMode.XML : MarkupRenderMode.HTML);
}

public static String render(Node node, MarkupRenderMode renderMode) {
  return render(node, renderMode, false);
}

/**
 * @deprecated For use only by non-caja clients of the parser/render
 */
@Deprecated
public static String renderUnsafe(Node node, MarkupRenderMode renderMode) {
  return render(node, renderMode, true);
}

// String-returning workhorse: renders into a fresh buffer via a
// Concatenator-backed RenderContext.
private static String render(Node node, MarkupRenderMode renderMode,
    boolean renderUnsafe) {
  StringBuilder sb = new StringBuilder();
  RenderContext rc = new RenderContext(new Concatenator(sb, null))
      .withMarkupRenderMode(renderMode);
  render(node, rc, renderUnsafe);
  rc.getOut().noMoreTokens();
  return sb.toString();
}

public static String render(DocumentType docType, Node node,
    MarkupRenderMode renderMode) {
  return render(docType, node, renderMode, false);
}

/**
 * @deprecated For use only by non-caja clients of the parser/render
 */
@Deprecated
public static String renderUnsafe(DocumentType docType, Node node,
    MarkupRenderMode renderMode) {
  return render(docType, node, renderMode, true);
}

// Prefixes the rendered doctype (when valid) to the rendered node.
private static String render(DocumentType docType, Node node,
    MarkupRenderMode renderMode, boolean renderUnsafe) {
  StringBuilder sb = new StringBuilder();
  if (null != docType) {
    String rendering = renderDocumentType(docType);
    if (null != rendering) {
      sb.append(rendering);
    }
  }
  sb.append(render(node, renderMode, renderUnsafe));
  return sb.toString();
}

private Nodes() { /* uninstantiable */ }
}
/**
 * Stateful serializer that walks a DOM tree and appends markup to a
 * StringBuilder.  Not thread-safe; one instance is created per render call.
 */
final class Renderer {
  final RenderContext rc;
  /** Accumulates the serialized markup. */
  final StringBuilder out;
  final MarkupRenderMode mode;
  /** True when emitting XML rather than HTML. */
  final boolean asXml;
  /**
   * Depth of the namespace chain at construction; used to distinguish
   * bindings already in scope before rendering from ones added during it.
   */
  final int namespaceDepthAtStart;

  Renderer(
      RenderContext rc, StringBuilder out, MarkupRenderMode mode,
      Namespaces ns) {
    this.rc = rc;
    this.out = out;
    this.mode = mode;
    this.asXml = mode == MarkupRenderMode.XML;
    this.namespaceDepthAtStart = depth(ns);
  }

  private static final String HTML_NS = Namespaces.HTML_NAMESPACE_URI;

  /**
   * @deprecated For use only by non-caja clients of the parser/render
   */
  @Deprecated
  void renderUnsafe(Node node, Namespaces ns) {
    render(node, ns, true);
  }

  void render(Node node, Namespaces ns) {
    render(node, ns, false);
  }

  /** Renders {@code sib} and all of its following siblings in order. */
  void renderSibs(Node sib, Namespaces ns, boolean renderUnsafe) {
    for (; sib != null; sib = sib.getNextSibling()) {
      render(sib, ns, renderUnsafe);
    }
  }
/**
 * Renders {@code node} and its subtree into {@link #out}.
 *
 * @param ns the namespace bindings in scope at this node.
 * @param renderUnsafe whether standard comment nodes are emitted; they are
 *     suppressed otherwise because the Caja pipeline does not sanitize
 *     comments.
 */
void render(Node node, Namespaces ns, boolean renderUnsafe) {
  switch (node.getNodeType()) {
    case Node.DOCUMENT_NODE: case Node.DOCUMENT_FRAGMENT_NODE:
      renderSibs(node.getFirstChild(), ns, renderUnsafe);
      break;
    case Node.ELEMENT_NODE: {
      Element el = (Element) node;
      out.append('<');
      int tagNameStart = out.length();
      boolean addElNs = false;
      Namespaces elNs;
      {
        String nsUri = el.getNamespaceURI();
        if (nsUri == null) { nsUri = HTML_NS; }
        elNs = ns.forUri(nsUri);
        if (elNs == null) {
          // Element's namespace has no in-scope binding; declare one here.
          elNs = ns = addNamespace(ns, nsUri, el.getPrefix());
          addElNs = true;
        }
      }
      if (elNs.prefix.length() != 0) {
        out.append(elNs.prefix).append(':');
      }
      String localName = el.getLocalName();
      // TODO: do away with the below once Shindig has done away with Neko.
      // This is a workaround for a bug in Element.getLocalName in the version
      // of Neko used by Shindig.
      // See also similar attribute rendering code in this file.
      if (localName == null) {
        localName = el.getTagName();
        if (localName.indexOf(':') >= 0) {
          throw new UncheckedUnrenderableException(localName);
        }
      }
      // NOTE(review): identity comparison assumes URIs stored in Namespaces
      // are interned -- confirm against the Namespaces class before relying
      // on it for URIs obtained at runtime.
      boolean isHtml = elNs.uri == HTML_NS;
      if (isHtml) { localName = Strings.lower(localName); }
      out.append(localName);
      int tagNameEnd = out.length();
      if (addElNs) {
        out.append(' ');
        renderNamespace(elNs);
      } else if (elNs.prefix.isEmpty() && Nodes.hasXmlnsDeclaration(el)) {
        // Fix: was `elNs.prefix == ""`, an identity comparison that relied
        // on string interning and silently dropped the declaration for an
        // equal but distinct empty-string instance.
        // Since the prefix of the namespace is blank, adding an xmlns cannot
        // possibly change the namespace resolution of contained elements or
        // attributes.
        out.append(" xmlns=\"");
        Escaping.escapeXml(elNs.uri, true, out);
        out.append('"');
      }
      NamedNodeMap attrs = el.getAttributes();
      for (int i = 0, n = attrs.getLength(); i < n; ++i) {
        Attr a = (Attr) attrs.item(i);
        String attrUri = a.getNamespaceURI();
        // Attributes created via setAttribute calls for ISINDEX elements and
        // xmlns attributes have no namespace URI.
        String attrLocalName = a.getLocalName();
        if (Namespaces.XMLNS_NAMESPACE_URI.equals(attrUri)) {
          String nsPrefix = attrLocalName;
          Namespaces added = addNamespaceFromAttribute(
              nsPrefix, a.getValue(), ns);
          if (added == null) { continue; }  // masking declaration; drop it
          ns = added;
        } else if (attrLocalName == null) {
          attrLocalName = a.getName();
          if (isXmlnsDecl(attrLocalName)) {
            // "xmlns" declares the default namespace; "xmlns:p" declares p.
            String nsPrefix = "";
            if (attrLocalName.length() > 5) {
              nsPrefix = attrLocalName.substring(6);
            }
            Namespaces added = addNamespaceFromAttribute(
                nsPrefix, a.getValue(), ns);
            if (added == null) { continue; }
            ns = added;
          } else if (attrLocalName.indexOf(':') >= 0) {
            throw new UncheckedUnrenderableException(null);
          }
        }
        out.append(' ');
        if (attrUri != null && (attrUri = attrUri.intern()) != elNs.uri) {
          Namespaces attrNs = ns.forUri(attrUri);
          if (attrNs == null) {
            attrNs = ns = addNamespace(ns, attrUri, a.getPrefix());
            renderNamespace(attrNs);
            out.append(' ');
          }
          out.append(attrNs.prefix).append(':');
        }
        attrLocalName = emitLocalName(attrLocalName, isHtml);
        // http://www.w3.org/TR/html401/intro/sgmltut.html
        // #didx-boolean_attribute
        // Authors should be aware that many user agents only recognize the
        // minimized form of boolean attributes and not the full form.
        if (!(isHtml && mode == MarkupRenderMode.HTML4_BACKWARDS_COMPAT
              && BooleanAttrs.isBooleanAttr(attrLocalName))) {
          out.append("=\"");
          Escaping.escapeXml(a.getValue(), true, out);
          out.append('"');
        }
      }
      HtmlTextEscapingMode m =
          HtmlTextEscapingMode.getModeForTag(localName);
      Node first = el.getFirstChild();
      if (first == null && m == HtmlTextEscapingMode.VOID) {
        out.append(" />");
      } else {
        out.append(">");
        if (m == HtmlTextEscapingMode.CDATA
            || m == HtmlTextEscapingMode.PLAIN_TEXT) {
          renderCdata(localName, el, asXml);
        } else {
          renderSibs(first, ns, renderUnsafe);
        }
        out.append("</").append(out, tagNameStart, tagNameEnd).append(">");
      }
      break;
    }
    case Node.TEXT_NODE:
      // This is required for all PCDATA content to distinguish it from tags.
      // This is not only appropriate for RCDATA, but is required.
      // http://dev.w3.org/html5/markup/aria/syntax.html#escaping-text-span:
      //     The text in style, script, title, and textarea elements must not
      //     have an escaping text span start that is not followed by an
      //     escaping text span end.
      // The script and style mentioned above have CDATA content, not RCDATA,
      // but title and textarea are the RCDATA to which this is relevant.
      Escaping.escapeXml(node.getNodeValue(), true, out);
      break;
    case Node.CDATA_SECTION_NODE:
      String value = node.getNodeValue();
      Escaping.escapeXml(value, true, out);
      break;
    case Node.ATTRIBUTE_NODE: {
      Attr a = (Attr) node;
      String localName = a.getLocalName();
      if (localName == null) {
        localName = a.getName();
      }
      emitLocalName(localName, HTML_NS.equals(a.getNamespaceURI()));
      out.append("=\"");
      Escaping.escapeXml(a.getValue(), true, out);
      out.append('"');
      break;
    }
    case Node.PROCESSING_INSTRUCTION_NODE: {
      if (!asXml) {
        throw new UncheckedUnrenderableException(
            "XML not renderable as HTML due to processing instruction");
      }
      ProcessingInstruction pi = (ProcessingInstruction) node;
      String target = pi.getTarget();
      String data = pi.getData();
      if (data.contains("?>")) {
        throw new UncheckedUnrenderableException(
            "XML document not renderable due to \"?>\" inside "
            + "processing instruction");
      }
      // XML reserves all PI targets beginning with "xml" (any case).
      // Fix: guard the substring so targets shorter than three characters do
      // not throw StringIndexOutOfBoundsException before the isName check.
      if ((target.length() >= 3
           && Strings.eqIgnoreCase(target.substring(0, 3), "xml"))
          || !isName(target)) { // isName check avoids targets with "?>".
        throw new UncheckedUnrenderableException(
            "Bad processing instruction target " + target);
      }
      out.append("<?").append(target).append(' ').append(data).append("?>");
      break;
    }
    case Node.COMMENT_NODE: {
      // Comment nodes could either be of the standard HTML comment type,
      // or of the downlevel-hidden type of IE comments or
      // or of the downlevel-revealed type of IE comments.
      // 1) Standard HTML comments and IE downlevel-hidden comments are
      //    have HtmlTokenType.COMMENT present in the userData of the node
      //    These are only rendered if renderUnsafe is true, and these need
      //    <!-- and --> to be attached to them, along with extra sanitization
      //    to make sure they match the specs.
      // 2) IE downlevel-revealed comment markers do not need either
      //    <!-- or --> and these should be rendered even if renderUnsafe is
      //    false, because these are directives processed by non-IE browsers
      //    as well.
      String commentType =
          node.getUserData("COMMENT_TYPE") != null ?
          node.getUserData("COMMENT_TYPE").toString() :
          // TODO(anupama): We need to have COMMENT as default because
          // Node.cloneNode does not copy over userData for nodes.
          // Figure out how we can fix this to behave correctly.
          HtmlTokenType.COMMENT.toString();
      String text = node.getNodeValue();
      boolean isStandardComment =
          HtmlTokenType.COMMENT.toString().equals(commentType);
      if (renderUnsafe && isStandardComment) {
        // HTML5 spec 11.1.6
        //   Comments must start with the four character sequence (<!--).
        //   Following this sequence, the comment may have text, with the
        //   additional restriction that the text must not start with a
        //   single U+003E GREATER-THAN SIGN character (>), nor start with a
        //   U+002D HYPHEN-MINUS character (-) followed by a U+003E
        //   GREATER-THAN SIGN (>) character, nor contain two consecutive
        //   U+002D HYPHEN-MINUS characters (--), nor end with a U+002D
        //   HYPHEN-MINUS character (-). Finally, the comment must be ended
        //   by the three character sequence (-->).
        // XML 1.0 spec 2.5
        //   Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
        String problem = null;
        problem = text.startsWith(">") ? "starts with '>'" : problem;
        if (rc.markupRenderMode() != MarkupRenderMode.HTML) {
          problem = text.startsWith("-") ? "starts with '-'" : problem;
          problem = text.endsWith("-") ? "ends with '-'" : problem;
        } else {
          // If the comment starts or ends with "-", we remove these to make
          // it spec-compliant as far as possible.
          if (text.startsWith("-") || text.endsWith("-")) {
            while (text.startsWith("-")) {
              text = text.substring(1);
            }
            while (text.endsWith("-")) {
              text = text.substring(0, text.length() - 1);
            }
          }
        }
        // Comment nodes are only rendered in unsafe mode
        // TODO: Uncommenting the following check makes the comment rendering
        // html5/xml compliant, however, breaks some webpages which rely on
        // broken behaviour in browsers.
        // problem = text.contains("--") ? "contains '--'" : problem;
        if (null != problem) {
          throw new UncheckedUnrenderableException(
              "XML comment unrenderable because it " + problem);
        }
        out.append("<!--");
        out.append(text);
        out.append("-->");
      } else if (!isStandardComment) {
        // Downlevel-revealed comment.
        out.append(text);
      }
      break;
    }
  }
}
// This emits the contents of an HTML element that starts a RAWTEXT
// parsing context, which means no entities or tags are parsed.
private void renderCdata(String localName, Element el, boolean asXml) {
  StringBuilder cdata = new StringBuilder();
  for (Node c = el.getFirstChild(); c != null; c = c.getNextSibling()) {
    switch (c.getNodeType()) {
      case Node.TEXT_NODE: case Node.CDATA_SECTION_NODE:
        String text = c.getNodeValue();
        if (asXml) {
          Escaping.escapeXml(text, true, cdata);
        } else {
          // HTML RAWTEXT is emitted verbatim: entities are not re-encoded.
          cdata.append(text);
        }
        break;
      default:
        // Non-text children (e.g. comments) are rendered recursively.
        cdata.append(Nodes.render(c));
        break;
    }
  }
  // Whether we're emitting xml or html, verify that the result will
  // not break html parsing.
  int problemIndex = checkHtmlCdataCloseable(localName, cdata);
  if (problemIndex != -1) {
    throw new UncheckedUnrenderableException(
        "Document not renderable due to '"
        + cdata.subSequence(
            problemIndex,
            Math.min(cdata.length(), problemIndex + 10))
        + "' in RAWTEXT element");
  }
  out.append(cdata);
}
// Baseline depth of the shared namespace chain; see comment below.
private static final int COMMON_NS_DEPTH = depth(Namespaces.COMMON);

/**
 * Returns {@code base} extended with a binding for {@code uri}, preferring
 * {@code suggestedPrefix} when it is a usable, unbound identifier, and
 * synthesizing a {@code _ns<k>} prefix otherwise.
 */
private static Namespaces addNamespace(
    Namespaces base, String uri, String suggestedPrefix) {
  if (isAlphaNumericId(suggestedPrefix)
      && base.forPrefix(suggestedPrefix) == null) {
    return new Namespaces(base, suggestedPrefix, uri);
  }
  // We subtract COMMON_NS_DEPTH so that when we modify Namespaces.COMMON,
  // we do not change the output for documents that do not depend on the
  // added or removed namespaces.
  // It is alright for depth to be negative since dashes can appear in
  // namespace prefixes.
  return new Namespaces(base, "_ns" + (depth(base) - COMMON_NS_DEPTH), uri);
}
/**
 * Extends {@code ns} with the binding declared by an xmlns attribute, or
 * returns null when the declaration would mask a binding that was already in
 * scope before rendering started (masking declarations must not be output).
 */
private Namespaces addNamespaceFromAttribute(
    String nsPrefix, String nsUri, Namespaces ns) {
  Namespaces existing = ns.forPrefix(nsPrefix);
  boolean masksOuterBinding = existing != null
      && !existing.uri.equals(nsUri)
      && depth(existing) <= namespaceDepthAtStart;
  return masksOuterBinding ? null : new Namespaces(ns, nsPrefix, nsUri);
}
/** Number of links in the parent chain of {@code ns}, inclusive. */
private static int depth(Namespaces ns) {
  int count = 0;
  Namespaces cursor = ns;
  while (cursor != null) {
    ++count;
    cursor = cursor.parent;
  }
  return count;
}
/** Emits an {@code xmlns:<prefix>="<uri>"} declaration for {@code ns}. */
private void renderNamespace(Namespaces ns) {
  out.append("xmlns:").append(ns.prefix).append("=\"");
  Escaping.escapeXml(ns.uri, true, out);
  out.append('"');
}
/**
 * True for attribute names of the form {@code xmlns} or
 * {@code xmlns:<prefix>} with a non-empty prefix.
 */
private static boolean isXmlnsDecl(String attrName) {
  return "xmlns".equals(attrName)
      || (attrName.length() > 6 && attrName.startsWith("xmlns:"));
}
// Fast-path lookup tables, indexed by char up to 'z', for characters that
// can appear in a local name without escaping or case normalization.
private static final boolean[] CASE_SENS_NAME_CHARS = new boolean['z' + 1];
private static final boolean[] CASE_INSENS_NAME_CHARS = new boolean['z' + 1];
static {
  for (char ch = '0'; ch <= '9'; ++ch) {
    CASE_SENS_NAME_CHARS[ch] = CASE_INSENS_NAME_CHARS[ch] = true;
  }
  for (char ch = 'a'; ch <= 'z'; ++ch) {
    CASE_SENS_NAME_CHARS[ch] = CASE_INSENS_NAME_CHARS[ch] = true;
  }
  // Uppercase letters are "simple" only where case is significant (XML);
  // HTML names get lower-cased, so uppercase forces the slow path there.
  for (char ch = 'A'; ch <= 'Z'; ++ch) {
    CASE_SENS_NAME_CHARS[ch] = true;
  }
}
/**
 * Appends {@code name} to {@link #out}, lower-casing it for HTML names and
 * XML-escaping it when it contains anything beyond simple name characters.
 *
 * @return the name as emitted (possibly lower-cased).
 */
private String emitLocalName(String name, boolean isHtml) {
  // speed up common case where we already have lower-cased letters and
  // digits.
  boolean[] simple = isHtml ? CASE_INSENS_NAME_CHARS : CASE_SENS_NAME_CHARS;
  for (int i = 0, n = name.length(); i < n; ++i) {
    char ch = name.charAt(i);
    if (ch > 'z' || !simple[ch]) {
      // Slow path: normalize case first, then escape the whole name.
      if (isHtml) { name = Strings.lower(name); }
      Escaping.escapeXml(name, true, out);
      return name;
    }
  }
  out.append(name);
  return name;
}
/**
 * True when {@code s} is non-empty, starts with an ASCII letter, and
 * continues with only ASCII letters and digits.
 */
private static boolean isAlphaNumericId(String s) {
  if (s == null || s.isEmpty()) { return false; }
  char first = s.charAt(0);
  boolean startsWithLetter =
      ('A' <= first && first <= 'Z') || ('a' <= first && first <= 'z');
  if (!startsWithLetter) { return false; }
  for (int i = s.length(); --i >= 1;) {
    char ch = s.charAt(i);
    if (ch > 'z' || !CASE_SENS_NAME_CHARS[ch]) { return false; }
  }
  return true;
}
/**
 * Check that the content of a CDATA element does not contain a close tag
 * for that element or unbalanced escaping text spans.
 *
 * @return -1 if the content is safe, or the start index of a close tag or
 *     escaping text span boundary otherwise.
 */
private static int checkHtmlCdataCloseable(
    String localName, StringBuilder sb) {
  // Index of an open "<!--" escaping text span, or -1 when none is open.
  int escapingTextSpanStart = -1;
  for (int i = 0, n = sb.length(); i < n; ++i) {
    char ch = sb.charAt(i);
    // NUL chars tend to confuse parsers
    if (ch == '\u0000') {
      return i;
    }
    switch (ch) {
      case '<':
        if (i + 3 < n
            && '!' == sb.charAt(i + 1)
            && '-' == sb.charAt(i + 2)
            && '-' == sb.charAt(i + 3)) {
          // "<!--": opens an escaping text span; a second opener before the
          // first closes is unbalanced.
          if (escapingTextSpanStart == -1) {
            escapingTextSpanStart = i;
          } else {
            return escapingTextSpanStart;
          }
        } else if (i + 1 + localName.length() < n
                   && '/' == sb.charAt(i + 1)
                   && Strings.eqIgnoreCase(localName,
                       sb.substring(i + 2, i + 2 + localName.length()))) {
          // A close tag contained in the content.
          if (escapingTextSpanStart < 0) {
            // We could try some recovery strategies here.
            // E.g. prepending "/<!--\n" to sb if "script".equals(localName)
            return i;
          }
          if (!"script".equals(localName)) {
            // Script tags are commonly included inside script tags.
            // <script><!--document.write('<script>f()</script>');--></script>
            // but this does not happen in other CDATA element types.
            // Actually allowing an end tag inside others is problematic.
            // Specifically,
            // <style><!--</style>-->/* foo */</style>
            // displays the text "/* foo */" on some browsers.
            return i;
          }
        }
        break;
      case '>':
        // From the HTML5 spec:
        //    The text in style, script, title, and textarea elements must not
        //    have an escaping text span start that is not followed by an
        //    escaping text span end.
        // We look left since the HTML 5 spec allows the escaping text span
        // end to share dashes with the start.
        if (i >= 2 && '-' == sb.charAt(i - 1) && '-' == sb.charAt(i - 2)) {
          if (escapingTextSpanStart < 0) { return i - 2; }
          escapingTextSpanStart = -1;
        }
        break;
    }
  }
  if (escapingTextSpanStart >= 0) {
    // We could try recovery strategies here.
    // E.g. appending "//-->" to the buffer if "script".equals(localName)
    return escapingTextSpanStart;
  }
  return -1;
}
/**
 * True if {@code s} is a Name as defined in section 2.3 of XML 1.0
 * (Fifth Edition).
 *
 * Fix: iterates by code point rather than by {@code char}.  The NameChar
 * production (and NAME_CHARS, which includes U+10000-U+EFFFF) allows
 * supplementary characters; testing {@code charAt} values passed their
 * surrogate halves to the set, which rejected valid names containing any
 * character above U+FFFF.
 */
private static boolean isName(String s) {
  int n = s.length();
  if (n == 0) { return false; }
  int cp = s.codePointAt(0);
  if (!NAME_START_CHARS.contains(cp)) { return false; }
  for (int i = Character.charCount(cp); i < n; i += Character.charCount(cp)) {
    cp = s.codePointAt(i);
    if (!NAME_CHARS.contains(cp)) { return false; }
  }
  return true;
}
/**
* From http://www.w3.org/TR/2008/REC-xml-20081126/#NT-NameStartChar
* <pre>
* NameStartChar ::= ":" | [A-Z] | "_" | [a-z] | [#xC0-#xD6]
* | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF]
* | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF]
* | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF]
* </pre>
*/
private static final SparseBitSet NAME_START_CHARS = SparseBitSet.withRanges(
0x3a, 0x3b, 0x41, 0x5b, 0x5f, 0x60, 0x61, 0x7b, 0xc0, 0xd7, 0xd8, 0xf7,
0x2ff, 0x300, 0x370, 0x37e, 0x37f, 0x2000, 0x200c, 0x200e, 0x2070, 0x2190,
0x2c00, 0x2ff0, 0x3001, 0xd800, 0xf900, 0xfdd0, 0xfdf0, 0xfffe,
0x10000, 0xf0000);
/**
* From http://www.w3.org/TR/2008/REC-xml-20081126/#NT-NameChar
* <pre>
* NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7
* | [#x0300-#x036F] | [#x203F-#x2040]
* </pre>
*/
private static final SparseBitSet NAME_CHARS = SparseBitSet.withRanges(
0x2d, 0x2f, 0x30, 0x3b, 0x41, 0x5b, 0x5f, 0x60, 0x61, 0x7b, 0xb7, 0xb8,
0xc0, 0xd7, 0xd8, 0xf7, 0x2ff, 0x37e, 0x37f, 0x2000, 0x200c, 0x200e,
0x203f, 0x2041, 0x2070, 0x2190, 0x2c00, 0x2ff0, 0x3001, 0xd800,
0xf900, 0xfdd0, 0xfdf0, 0xfffe, 0x10000, 0xf0000);
}
/** Static utility for identifying HTML boolean attributes. */
final class BooleanAttrs {
  /**
   * The set of HTML4.01 attributes that have the sole value {@code (<name>)}
   * where {@code <name>} is the attribute name and that are #IMPLIED.
   * @see <a href="http://www.w3.org/TR/html401/index/attributes.html">
   * the HTML4.01 attributes index</a>
   */
  private static final Set<String> BOOLEAN_ATTR_NAMES = ImmutableSet.of(
      "checked", "compact", "declare", "defer", "disabled", "ismap", "multiple",
      "nohref", "noresize", "noshade", "nowrap", "readonly", "selected");

  // http://www.w3.org/TR/html401/index/attributes.html
  /** True iff the given lower-cased HTML attribute local name is boolean. */
  static boolean isBooleanAttr(String htmlAttrLocalName) {
    return BOOLEAN_ATTR_NAMES.contains(htmlAttrLocalName);
  }

  // Fix: utility class was instantiable via the implicit default
  // constructor; suppress it.
  private BooleanAttrs() { /* uninstantiable */ }
}
|
googleapis/google-cloud-java | 36,006 | java-notebooks/proto-google-cloud-notebooks-v2/src/main/java/com/google/cloud/notebooks/v2/NetworkInterface.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/notebooks/v2/gce_setup.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.notebooks.v2;
/**
*
*
* <pre>
* The definition of a network interface resource attached to a VM.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v2.NetworkInterface}
*/
public final class NetworkInterface extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.notebooks.v2.NetworkInterface)
    NetworkInterfaceOrBuilder {
  // NOTE(review): protoc-generated message class ("DO NOT EDIT" in the file
  // header); change google/cloud/notebooks/v2/gce_setup.proto and regenerate
  // rather than hand-editing this class.
  private static final long serialVersionUID = 0L;
  // Use NetworkInterface.newBuilder() to construct.
  private NetworkInterface(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: every field at its proto3 default (empty string, enum 0).
  private NetworkInterface() {
    network_ = "";
    subnet_ = "";
    nicType_ = 0;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new NetworkInterface();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.notebooks.v2.GceSetupProto
        .internal_static_google_cloud_notebooks_v2_NetworkInterface_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.notebooks.v2.GceSetupProto
        .internal_static_google_cloud_notebooks_v2_NetworkInterface_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.notebooks.v2.NetworkInterface.class,
            com.google.cloud.notebooks.v2.NetworkInterface.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * The type of vNIC driver.
   * Default should be NIC_TYPE_UNSPECIFIED.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.notebooks.v2.NetworkInterface.NicType}
   */
  public enum NicType implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * No type specified.
     * </pre>
     *
     * <code>NIC_TYPE_UNSPECIFIED = 0;</code>
     */
    NIC_TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * VIRTIO
     * </pre>
     *
     * <code>VIRTIO_NET = 1;</code>
     */
    VIRTIO_NET(1),
    /**
     *
     *
     * <pre>
     * GVNIC
     * </pre>
     *
     * <code>GVNIC = 2;</code>
     */
    GVNIC(2),
    // Sentinel for wire values this generated code does not know about.
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * No type specified.
     * </pre>
     *
     * <code>NIC_TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int NIC_TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * VIRTIO
     * </pre>
     *
     * <code>VIRTIO_NET = 1;</code>
     */
    public static final int VIRTIO_NET_VALUE = 1;
    /**
     *
     *
     * <pre>
     * GVNIC
     * </pre>
     *
     * <code>GVNIC = 2;</code>
     */
    public static final int GVNIC_VALUE = 2;
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static NicType valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static NicType forNumber(int value) {
      switch (value) {
        case 0:
          return NIC_TYPE_UNSPECIFIED;
        case 1:
          return VIRTIO_NET;
        case 2:
          return GVNIC;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<NicType> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<NicType> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<NicType>() {
          public NicType findValueByNumber(int number) {
            return NicType.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.notebooks.v2.NetworkInterface.getDescriptor().getEnumTypes().get(0);
    }
    private static final NicType[] VALUES = values();
    public static NicType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private NicType(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.cloud.notebooks.v2.NetworkInterface.NicType)
  }
  public static final int NETWORK_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily decoded/encoded by the
  // accessors below and cached back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object network_ = "";
  /**
   *
   *
   * <pre>
   * Optional. The name of the VPC that this VM instance is in.
   * Format:
   * `projects/{project_id}/global/networks/{network_id}`
   * </pre>
   *
   * <code>string network = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The network.
   */
  @java.lang.Override
  public java.lang.String getNetwork() {
    java.lang.Object ref = network_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      network_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The name of the VPC that this VM instance is in.
   * Format:
   * `projects/{project_id}/global/networks/{network_id}`
   * </pre>
   *
   * <code>string network = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for network.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNetworkBytes() {
    java.lang.Object ref = network_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      network_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SUBNET_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object subnet_ = "";
  /**
   *
   *
   * <pre>
   * Optional. The name of the subnet that this VM instance is in.
   * Format:
   * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
   * </pre>
   *
   * <code>string subnet = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The subnet.
   */
  @java.lang.Override
  public java.lang.String getSubnet() {
    java.lang.Object ref = subnet_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      subnet_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The name of the subnet that this VM instance is in.
   * Format:
   * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
   * </pre>
   *
   * <code>string subnet = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for subnet.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSubnetBytes() {
    java.lang.Object ref = subnet_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      subnet_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int NIC_TYPE_FIELD_NUMBER = 3;
  // Stored as the raw wire value so unknown enum numbers round-trip.
  private int nicType_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. The type of vNIC to be used on this interface. This may be gVNIC
   * or VirtioNet.
   * </pre>
   *
   * <code>
   * .google.cloud.notebooks.v2.NetworkInterface.NicType nic_type = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The enum numeric value on the wire for nicType.
   */
  @java.lang.Override
  public int getNicTypeValue() {
    return nicType_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The type of vNIC to be used on this interface. This may be gVNIC
   * or VirtioNet.
   * </pre>
   *
   * <code>
   * .google.cloud.notebooks.v2.NetworkInterface.NicType nic_type = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The nicType.
   */
  @java.lang.Override
  public com.google.cloud.notebooks.v2.NetworkInterface.NicType getNicType() {
    com.google.cloud.notebooks.v2.NetworkInterface.NicType result =
        com.google.cloud.notebooks.v2.NetworkInterface.NicType.forNumber(nicType_);
    return result == null
        ? com.google.cloud.notebooks.v2.NetworkInterface.NicType.UNRECOGNIZED
        : result;
  }
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Proto3 semantics: a field is written to the wire only when it differs from
  // its default value.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(network_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, network_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnet_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, subnet_);
    }
    if (nicType_
        != com.google.cloud.notebooks.v2.NetworkInterface.NicType.NIC_TYPE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(3, nicType_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(network_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, network_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnet_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, subnet_);
    }
    if (nicType_
        != com.google.cloud.notebooks.v2.NetworkInterface.NicType.NIC_TYPE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, nicType_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.notebooks.v2.NetworkInterface)) {
      return super.equals(obj);
    }
    com.google.cloud.notebooks.v2.NetworkInterface other =
        (com.google.cloud.notebooks.v2.NetworkInterface) obj;
    if (!getNetwork().equals(other.getNetwork())) return false;
    if (!getSubnet().equals(other.getSubnet())) return false;
    if (nicType_ != other.nicType_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized (messages are immutable); uses the standard generated-code
  // field-number/value mixing scheme.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NETWORK_FIELD_NUMBER;
    hash = (53 * hash) + getNetwork().hashCode();
    hash = (37 * hash) + SUBNET_FIELD_NUMBER;
    hash = (53 * hash) + getSubnet().hashCode();
    hash = (37 * hash) + NIC_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + nicType_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.notebooks.v2.NetworkInterface prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The definition of a network interface resource attached to a VM.
   * </pre>
   *
   * Protobuf type {@code google.cloud.notebooks.v2.NetworkInterface}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v2.NetworkInterface)
      com.google.cloud.notebooks.v2.NetworkInterfaceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.notebooks.v2.GceSetupProto
          .internal_static_google_cloud_notebooks_v2_NetworkInterface_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.notebooks.v2.GceSetupProto
          .internal_static_google_cloud_notebooks_v2_NetworkInterface_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.notebooks.v2.NetworkInterface.class,
              com.google.cloud.notebooks.v2.NetworkInterface.Builder.class);
    }
    // Construct using com.google.cloud.notebooks.v2.NetworkInterface.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      network_ = "";
      subnet_ = "";
      nicType_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.notebooks.v2.GceSetupProto
          .internal_static_google_cloud_notebooks_v2_NetworkInterface_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.notebooks.v2.NetworkInterface getDefaultInstanceForType() {
      return com.google.cloud.notebooks.v2.NetworkInterface.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.notebooks.v2.NetworkInterface build() {
      com.google.cloud.notebooks.v2.NetworkInterface result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.notebooks.v2.NetworkInterface buildPartial() {
      com.google.cloud.notebooks.v2.NetworkInterface result =
          new com.google.cloud.notebooks.v2.NetworkInterface(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose "has been set" bit is on (see bitField0_).
    private void buildPartial0(com.google.cloud.notebooks.v2.NetworkInterface result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.network_ = network_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.subnet_ = subnet_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.nicType_ = nicType_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.notebooks.v2.NetworkInterface) {
        return mergeFrom((com.google.cloud.notebooks.v2.NetworkInterface) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: only non-default fields of `other` overwrite.
    public Builder mergeFrom(com.google.cloud.notebooks.v2.NetworkInterface other) {
      if (other == com.google.cloud.notebooks.v2.NetworkInterface.getDefaultInstance()) return this;
      if (!other.getNetwork().isEmpty()) {
        network_ = other.network_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getSubnet().isEmpty()) {
        subnet_ = other.subnet_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.nicType_ != 0) {
        setNicTypeValue(other.getNicTypeValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                network_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                subnet_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                nicType_ = input.readEnum();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // "Has been set" bits: 0x1 = network_, 0x2 = subnet_, 0x4 = nicType_.
    private int bitField0_;
    private java.lang.Object network_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The name of the VPC that this VM instance is in.
     * Format:
     * `projects/{project_id}/global/networks/{network_id}`
     * </pre>
     *
     * <code>string network = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The network.
     */
    public java.lang.String getNetwork() {
      java.lang.Object ref = network_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        network_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the VPC that this VM instance is in.
     * Format:
     * `projects/{project_id}/global/networks/{network_id}`
     * </pre>
     *
     * <code>string network = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for network.
     */
    public com.google.protobuf.ByteString getNetworkBytes() {
      java.lang.Object ref = network_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        network_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the VPC that this VM instance is in.
     * Format:
     * `projects/{project_id}/global/networks/{network_id}`
     * </pre>
     *
     * <code>string network = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The network to set.
     * @return This builder for chaining.
     */
    public Builder setNetwork(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      network_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the VPC that this VM instance is in.
     * Format:
     * `projects/{project_id}/global/networks/{network_id}`
     * </pre>
     *
     * <code>string network = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNetwork() {
      network_ = getDefaultInstance().getNetwork();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the VPC that this VM instance is in.
     * Format:
     * `projects/{project_id}/global/networks/{network_id}`
     * </pre>
     *
     * <code>string network = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for network to set.
     * @return This builder for chaining.
     */
    public Builder setNetworkBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      network_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object subnet_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The name of the subnet that this VM instance is in.
     * Format:
     * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
     * </pre>
     *
     * <code>string subnet = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The subnet.
     */
    public java.lang.String getSubnet() {
      java.lang.Object ref = subnet_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        subnet_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the subnet that this VM instance is in.
     * Format:
     * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
     * </pre>
     *
     * <code>string subnet = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for subnet.
     */
    public com.google.protobuf.ByteString getSubnetBytes() {
      java.lang.Object ref = subnet_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        subnet_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the subnet that this VM instance is in.
     * Format:
     * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
     * </pre>
     *
     * <code>string subnet = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The subnet to set.
     * @return This builder for chaining.
     */
    public Builder setSubnet(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      subnet_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the subnet that this VM instance is in.
     * Format:
     * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
     * </pre>
     *
     * <code>string subnet = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSubnet() {
      subnet_ = getDefaultInstance().getSubnet();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The name of the subnet that this VM instance is in.
     * Format:
     * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`
     * </pre>
     *
     * <code>string subnet = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for subnet to set.
     * @return This builder for chaining.
     */
    public Builder setSubnetBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      subnet_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private int nicType_ = 0;
    /**
     *
     *
     * <pre>
     * Optional. The type of vNIC to be used on this interface. This may be gVNIC
     * or VirtioNet.
     * </pre>
     *
     * <code>
     * .google.cloud.notebooks.v2.NetworkInterface.NicType nic_type = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The enum numeric value on the wire for nicType.
     */
    @java.lang.Override
    public int getNicTypeValue() {
      return nicType_;
    }
    /**
     *
     *
     * <pre>
     * Optional. The type of vNIC to be used on this interface. This may be gVNIC
     * or VirtioNet.
     * </pre>
     *
     * <code>
     * .google.cloud.notebooks.v2.NetworkInterface.NicType nic_type = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The enum numeric value on the wire for nicType to set.
     * @return This builder for chaining.
     */
    public Builder setNicTypeValue(int value) {
      nicType_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The type of vNIC to be used on this interface. This may be gVNIC
     * or VirtioNet.
     * </pre>
     *
     * <code>
     * .google.cloud.notebooks.v2.NetworkInterface.NicType nic_type = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The nicType.
     */
    @java.lang.Override
    public com.google.cloud.notebooks.v2.NetworkInterface.NicType getNicType() {
      com.google.cloud.notebooks.v2.NetworkInterface.NicType result =
          com.google.cloud.notebooks.v2.NetworkInterface.NicType.forNumber(nicType_);
      return result == null
          ? com.google.cloud.notebooks.v2.NetworkInterface.NicType.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Optional. The type of vNIC to be used on this interface. This may be gVNIC
     * or VirtioNet.
     * </pre>
     *
     * <code>
     * .google.cloud.notebooks.v2.NetworkInterface.NicType nic_type = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The nicType to set.
     * @return This builder for chaining.
     */
    public Builder setNicType(com.google.cloud.notebooks.v2.NetworkInterface.NicType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      nicType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The type of vNIC to be used on this interface. This may be gVNIC
     * or VirtioNet.
     * </pre>
     *
     * <code>
     * .google.cloud.notebooks.v2.NetworkInterface.NicType nic_type = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNicType() {
      bitField0_ = (bitField0_ & ~0x00000004);
      nicType_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v2.NetworkInterface)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.notebooks.v2.NetworkInterface)
  private static final com.google.cloud.notebooks.v2.NetworkInterface DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.notebooks.v2.NetworkInterface();
  }
  public static com.google.cloud.notebooks.v2.NetworkInterface getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser used by all the static parseFrom overloads above.
  private static final com.google.protobuf.Parser<NetworkInterface> PARSER =
      new com.google.protobuf.AbstractParser<NetworkInterface>() {
        @java.lang.Override
        public NetworkInterface parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<NetworkInterface> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<NetworkInterface> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.notebooks.v2.NetworkInterface getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ofbiz | 36,088 | framework/base/src/main/java/org/apache/ofbiz/base/util/ObjectType.java | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.base.util;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import org.apache.ofbiz.base.conversion.ConversionException;
import org.apache.ofbiz.base.conversion.Converter;
import org.apache.ofbiz.base.conversion.Converters;
import org.apache.ofbiz.base.conversion.LocalizedConverter;
import org.apache.ofbiz.base.lang.IsEmpty;
import org.apache.ofbiz.base.lang.SourceMonitored;
import org.w3c.dom.Node;
/**
* Utilities for analyzing and converting Object types in Java
* Takes advantage of reflection
*/
public class ObjectType {
/** Fully qualified class name, used as the module tag for Debug logging. */
public static final String module = ObjectType.class.getName();

/** Shared marker object representing an explicit null value. */
public static final Object NULL = new NullObject();

public static final String LANG_PACKAGE = "java.lang."; // We will test both the raw value and this + raw value
public static final String SQL_PACKAGE = "java.sql.";   // We will test both the raw value and this + raw value

// Short alias -> fully qualified class name. Note the gotcha: "Date" resolves to
// java.sql.Date, NOT java.util.Date.
private static final Map<String, String> classAlias = new HashMap<String, String>();
// Primitive type name -> primitive TYPE token (e.g. "int" -> Integer.TYPE).
private static final Map<String, Class<?>> primitives = new HashMap<String, Class<?>>();

static {
    classAlias.put("Object", "java.lang.Object");
    classAlias.put("String", "java.lang.String");
    classAlias.put("Boolean", "java.lang.Boolean");
    classAlias.put("BigDecimal", "java.math.BigDecimal");
    classAlias.put("Double", "java.lang.Double");
    classAlias.put("Float", "java.lang.Float");
    classAlias.put("Long", "java.lang.Long");
    classAlias.put("Integer", "java.lang.Integer");
    classAlias.put("Short", "java.lang.Short");
    classAlias.put("Byte", "java.lang.Byte");
    classAlias.put("Character", "java.lang.Character");
    classAlias.put("Timestamp", "java.sql.Timestamp");
    classAlias.put("Time", "java.sql.Time");
    classAlias.put("Date", "java.sql.Date");
    classAlias.put("Locale", "java.util.Locale");
    classAlias.put("Collection", "java.util.Collection");
    classAlias.put("List", "java.util.List");
    classAlias.put("Set", "java.util.Set");
    classAlias.put("Map", "java.util.Map");
    classAlias.put("HashMap", "java.util.HashMap");
    classAlias.put("TimeZone", "java.util.TimeZone");
    classAlias.put("TimeDuration", "org.apache.ofbiz.base.util.TimeDuration");
    classAlias.put("GenericValue", "org.apache.ofbiz.entity.GenericValue");
    classAlias.put("GenericPK", "org.apache.ofbiz.entity.GenericPK");
    classAlias.put("GenericEntity", "org.apache.ofbiz.entity.GenericEntity");
    primitives.put("boolean", Boolean.TYPE);
    primitives.put("short", Short.TYPE);
    primitives.put("int", Integer.TYPE);
    primitives.put("long", Long.TYPE);
    primitives.put("float", Float.TYPE);
    primitives.put("double", Double.TYPE);
    primitives.put("byte", Byte.TYPE);
    primitives.put("char", Character.TYPE);
}
/**
* Loads a class with the current thread's context classloader.
* @param className The name of the class to load
* @return The requested class
* @throws ClassNotFoundException
*/
public static Class<?> loadClass(String className) throws ClassNotFoundException {
    // Delegate with a null loader; the two-arg overload falls back to the
    // current thread's context classloader.
    return loadClass(className, null);
}
/**
* Loads a class with the specified classloader.
* @param className The name of the class to load
* @param loader The ClassLoader to use
* @return The requested class
* @throws ClassNotFoundException
*/
public static Class<?> loadClass(String className, ClassLoader loader) throws ClassNotFoundException {
    // Primitive names ("int", "boolean", ...) map directly to their TYPE tokens.
    if (primitives.containsKey(className)) {
        return primitives.get(className);
    }
    // Strip generic type parameters; erasure means they do not affect loading.
    int genericsStart = className.indexOf("<");
    if (genericsStart != -1) {
        className = className.substring(0, genericsStart);
    }
    // Translate "Foo[]" into the JVM array-descriptor form Class.forName expects.
    // Details in http://java.sun.com/j2se/1.5.0/docs/guide/jni/spec/types.html#wp16437
    // NOTE(review): multi-dimensional arrays (e.g. "int[][]") are not handled here.
    if (className.endsWith("[]")) {
        if (Character.isLowerCase(className.charAt(0)) && className.indexOf(".") < 0) {
            // Primitive array: single-letter descriptor, e.g. "[I" for int[].
            // Use Locale.ROOT so the mapping is locale-independent (e.g. Turkish
            // locales would otherwise upcase "int" to a dotless descriptor).
            String prefix = className.substring(0, 1).toUpperCase(Locale.ROOT);
            // long and boolean have other prefix than first letter
            if (className.startsWith("long")) {
                prefix = "J";
            } else if (className.startsWith("boolean")) {
                prefix = "Z";
            }
            className = "[" + prefix;
        } else {
            // Object array: load the component class (recursively resolving any
            // alias) and build the "[Lcom.example.Foo;" descriptor.
            Class<?> arrayClass = loadClass(className.replace("[]", ""), loader);
            className = "[L" + arrayClass.getName().replace("[]", "") + ";";
        }
    }
    // If className is an alias (e.g. "String") replace it with the proper
    // class name (e.g. "java.lang.String").
    if (classAlias.containsKey(className)) {
        className = classAlias.get(className);
    }
    if (loader == null) {
        loader = Thread.currentThread().getContextClassLoader();
    }
    return Class.forName(className, true, loader);
}
/**
* Returns an instance of the specified class. This uses the default
* no-arg constructor to create the instance.
* @param className Name of the class to instantiate
* @return An instance of the named class
* @throws ClassNotFoundException
* @throws InstantiationException
* @throws IllegalAccessException
*/
public static Object getInstance(String className) throws ClassNotFoundException,
        InstantiationException, IllegalAccessException {
    Class<?> c = loadClass(className);
    // NOTE(review): Class.newInstance() is deprecated since Java 9; switching to
    // getDeclaredConstructor().newInstance() would change the checked exceptions
    // declared by this method, so it is left as-is for interface compatibility.
    Object o = c.newInstance();

    if (Debug.verboseOn()) Debug.logVerbose("Instantiated object: " + o.toString(), module);
    return o;
}
/**
* Tests if a class properly implements the specified interface.
* @param objectClass Class to test
* @param interfaceName Name of the interface to test against
* @return true if interfaceName is an interface of objectClass
* @throws ClassNotFoundException
*/
public static boolean interfaceOf(Class<?> objectClass, String interfaceName) throws ClassNotFoundException {
    // Resolve the interface name, then delegate to the Class-based overload.
    return interfaceOf(objectClass, loadClass(interfaceName));
}
/**
* Tests if a class properly implements the specified interface.
* @param objectClass Class to test
* @param interfaceObject to test against
* @return true if interfaceObject is an interface of the objectClass
*/
public static boolean interfaceOf(Class<?> objectClass, Object interfaceObject) {
    // Compare against the runtime class of the supplied object.
    return interfaceOf(objectClass, interfaceObject.getClass());
}
/**
* Returns an instance of the specified class using the constructor matching the specified parameters.
* @param className Name of the class to instantiate
* @param parameters Parameters passed to the constructor
* @return An instance of the className
* @throws ClassNotFoundException
* @throws InstantiationException
* @throws IllegalAccessException
*/
public static Object getInstance(String className, Object[] parameters) throws ClassNotFoundException,
        InstantiationException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    // Build the constructor signature from the runtime classes of the arguments.
    Class<?>[] signature = new Class<?>[parameters.length];
    int i = 0;
    for (Object parameter : parameters) {
        signature[i++] = parameter.getClass();
    }
    Object instance = loadClass(className).getConstructor(signature).newInstance(parameters);

    if (Debug.verboseOn()) Debug.logVerbose("Instantiated object: " + instance.toString(), module);
    return instance;
}
/**
* Tests if an object properly implements the specified interface.
* @param obj Object to test
* @param interfaceName Name of the interface to test against
* @return true if interfaceName is an interface of obj
* @throws ClassNotFoundException
*/
public static boolean interfaceOf(Object obj, String interfaceName) throws ClassNotFoundException {
    // Resolve the interface name, then delegate to the Class-based overload.
    return interfaceOf(obj, loadClass(interfaceName));
}
/**
* Tests if an object properly implements the specified interface.
* @param obj Object to test
* @param interfaceObject to test against
* @return true if interfaceObject is an interface of obj
*/
public static boolean interfaceOf(Object obj, Object interfaceObject) {
    // Compare against the runtime class of the supplied object.
    return interfaceOf(obj, interfaceObject.getClass());
}
/**
* Tests if an object properly implements the specified interface.
* @param obj Object to test
* @param interfaceClass Class to test against
* @return true if interfaceClass is an interface of obj
*/
public static boolean interfaceOf(Object obj, Class<?> interfaceClass) {
    // Delegate using the runtime class of the tested object.
    return interfaceOf(obj.getClass(), interfaceClass);
}
/**
* Tests if a class properly implements the specified interface.
* @param objectClass Class to test
* @param interfaceClass Class to test against
* @return true if interfaceClass is an interface of objectClass
*/
public static boolean interfaceOf(Class<?> objectClass, Class<?> interfaceClass) {
    // Walk the superclass chain; at each level check the directly declared
    // interfaces AND, recursively, the interfaces they extend. The previous
    // implementation only looked at directly declared interfaces, so a class
    // implementing e.g. only NavigableSet was not recognized as implementing Set.
    while (objectClass != null) {
        for (Class<?> iface : objectClass.getInterfaces()) {
            if (iface == interfaceClass || interfaceOf(iface, interfaceClass)) {
                return true;
            }
        }
        objectClass = objectClass.getSuperclass();
    }
    return false;
}
/**
* Tests if a class is a class of or a sub-class of the parent.
* @param objectClass Class to test
* @param parentName Name of the parent class to test against
* @return true if objectClass is a class of or a sub-class of the parent
* @throws ClassNotFoundException
*/
public static boolean isOrSubOf(Class<?> objectClass, String parentName) throws ClassNotFoundException {
    // Resolve the parent class name, then delegate to the Class-based overload.
    return isOrSubOf(objectClass, loadClass(parentName));
}
/**
* Tests if a class is a class of or a sub-class of the parent.
* @param objectClass Class to test
* @param parentObject Object to test against
* @return true if objectClass is a class of or a sub-class of the parent
*/
public static boolean isOrSubOf(Class<?> objectClass, Object parentObject) {
    // Compare against the runtime class of the supplied parent object.
    return isOrSubOf(objectClass, parentObject.getClass());
}
/**
* Tests if an object is an instance of or a sub-class of the parent.
* @param obj Object to test
* @param parentName Name of the parent class to test against
* @return true if obj is an instance of or a sub-class of the parent
* @throws ClassNotFoundException
*/
public static boolean isOrSubOf(Object obj, String parentName) throws ClassNotFoundException {
    // Resolve the parent class name, then delegate to the Class-based overload.
    return isOrSubOf(obj, loadClass(parentName));
}
/**
* Tests if an object is an instance of or a sub-class of the parent.
* @param obj Object to test
* @param parentObject Object to test against
* @return true if obj is an instance of or a sub-class of the parent
*/
public static boolean isOrSubOf(Object obj, Object parentObject) {
    // Compare against the runtime class of the supplied parent object.
    return isOrSubOf(obj, parentObject.getClass());
}
/**
* Tests if an object is an instance of or a sub-class of the parent.
* @param obj Object to test
* @param parentClass Class to test against
* @return true if obj is an instance of or a sub-class of the parent
*/
public static boolean isOrSubOf(Object obj, Class<?> parentClass) {
    // Delegate using the runtime class of the tested object.
    return isOrSubOf(obj.getClass(), parentClass);
}
/**
* Tests if a class is a class of or a sub-class of the parent.
* @param objectClass Class to test
* @param parentClass Class to test against
* @return true if objectClass is a class of or a sub-class of the parent
*/
public static boolean isOrSubOf(Class<?> objectClass, Class<?> parentClass) {
    // Walk up the superclass chain looking for an identity match.
    // Interfaces are never matched here: they do not appear in the chain
    // unless passed directly as objectClass.
    for (Class<?> current = objectClass; current != null; current = current.getSuperclass()) {
        if (current == parentClass) {
            return true;
        }
    }
    return false;
}
/**
* Tests if a class is a class of a sub-class of or properly implements an interface.
* @param objectClass Class to test
* @param typeObject Object to test against
* @return true if objectClass is a class of a sub-class of, or properly implements an interface
*/
public static boolean instanceOf(Class<?> objectClass, Object typeObject) {
    // Compare against the runtime class of the supplied type object.
    return instanceOf(objectClass, typeObject.getClass());
}
/**
* Tests if a class is a class of a sub-class of or properly implements an interface.
* @param objectClass Class to test
* @param typeName name to test against
* @return true if objectClass is a class or a sub-class of, or properly implements an interface
*/
public static boolean instanceOf(Class<?> objectClass, String typeName) {
    // Delegate with a null classloader (the context classloader is used downstream).
    return instanceOf(objectClass, typeName, null);
}
/**
* Tests if an object is an instance of a sub-class of or properly implements an interface.
* @param obj Object to test
* @param typeObject Object to test against
* @return true if obj is an instance of a sub-class of, or properly implements an interface
*/
public static boolean instanceOf(Object obj, Object typeObject) {
    // Compare against the runtime class of the supplied type object.
    return instanceOf(obj, typeObject.getClass());
}
/**
* Tests if an object is an instance of a sub-class of or properly implements an interface.
* @param obj Object to test
* @param typeName name to test against
* @return true if obj is an instance of a sub-class of, or properly implements an interface
*/
public static boolean instanceOf(Object obj, String typeName) {
    // Delegate with a null classloader (the context classloader is used downstream).
    return instanceOf(obj, typeName, null);
}
/**
* Tests if a class is a class of a sub-class of or properly implements an interface.
* @param objectClass Class to test
* @param typeName Object to test against
* @param loader
* @return true if objectClass is a class of a sub-class of, or properly implements an interface
*/
public static boolean instanceOf(Class<?> objectClass, String typeName, ClassLoader loader) {
    Class<?> infoClass = loadInfoClass(typeName, loader);

    // loadInfoClass throws rather than returning null on failure, but keep the
    // defensive check (braced, consistent with the Object-based overload below).
    if (infoClass == null) {
        throw new IllegalArgumentException("Illegal type found in info map (could not load class for specified type)");
    }

    return instanceOf(objectClass, infoClass);
}
/**
* Tests if an object is an instance of a sub-class of or properly implements an interface.
* @param obj Object to test
* @param typeName Object to test against
* @param loader
* @return true if obj is an instance of a sub-class of, or properly implements an interface
*/
public static boolean instanceOf(Object obj, String typeName, ClassLoader loader) {
    // Resolve the type name (raw, java.lang, or java.sql) before comparing.
    Class<?> typeClass = loadInfoClass(typeName, loader);

    if (typeClass == null) {
        throw new IllegalArgumentException("Illegal type found in info map (could not load class for specified type)");
    }

    return instanceOf(obj, typeClass);
}
/**
 * Loads the class for a type name, trying the raw name first and then the
 * java.lang and java.sql packages. Never returns null: all failures are
 * rethrown as IllegalArgumentException (same contract as before, but the
 * triple-nested try/catch pyramid is flattened into a loop).
 * @param typeName Simple or fully qualified type name
 * @param loader ClassLoader to use (null for the context classloader)
 * @return the resolved class
 * @throws IllegalArgumentException if the class cannot be loaded or a
 *         SecurityException occurs during loading
 */
public static Class<?> loadInfoClass(String typeName, ClassLoader loader) {
    String[] prefixes = {"", LANG_PACKAGE, SQL_PACKAGE};
    ClassNotFoundException lastNotFound = null;
    for (String prefix : prefixes) {
        try {
            return loadClass(prefix + typeName, loader);
        } catch (SecurityException se) {
            throw new IllegalArgumentException("Problems with classloader: security exception (" +
                    se.getMessage() + ")");
        } catch (ClassNotFoundException e) {
            // try the next prefix; remember the failure for the final message
            lastNotFound = e;
        }
    }
    throw new IllegalArgumentException("Cannot find and load the class of type: " + typeName +
            " or of type: " + LANG_PACKAGE + typeName + " or of type: " + SQL_PACKAGE + typeName +
            ": (" + lastNotFound.getMessage() + ")");
}
/**
* Tests if an object is an instance of a sub-class of or properly implements an interface.
* @param obj Object to test
* @param typeClass Class to test against
* @return true if obj is an instance of a sub-class of typeClass
*/
public static boolean instanceOf(Object obj, Class<?> typeClass) {
    // A null reference is treated as assignable to any type (returns true).
    return obj == null || instanceOf(obj.getClass(), typeClass);
}
/**
* Tests if a class is a class of a sub-class of or properly implements an interface.
* @param objectClass Class to test
* @param typeClass Class to test against
* @return true if objectClass is a class or sub-class of, or implements typeClass
*/
public static boolean instanceOf(Class<?> objectClass, Class<?> typeClass) {
    // When the target is an interface and the candidate is a concrete class,
    // check implemented interfaces; otherwise walk the superclass chain.
    boolean checkInterfaces = typeClass.isInterface() && !objectClass.isInterface();
    return checkInterfaces ? interfaceOf(objectClass, typeClass) : isOrSubOf(objectClass, typeClass);
}
public static Object simpleTypeConvert(Object obj, String type, String format, Locale locale, boolean noTypeFail) throws GeneralException {
    // Delegate with a null TimeZone; the full overload substitutes the JVM default.
    return simpleTypeConvert(obj, type, format, null, locale, noTypeFail);
}
/**
* Converts the passed object to the named simple type. Supported types
* include: String, Boolean, Double, Float, Long, Integer, Date (java.sql.Date),
* Time, Timestamp, TimeZone;
* @param obj Object to convert
* @param type Optional Java class name of type to convert to. A <code>null</code> or empty <code>String</code> will return the original object.
* @param format Optional (can be null) format string for Date, Time, Timestamp
* @param timeZone Optional (can be null) TimeZone for converting dates and times
* @param locale Optional (can be null) Locale for formatting and parsing Double, Float, Long, Integer
* @param noTypeFail Fail (Exception) when no type conversion is available, false will return the primary object
* @return the converted value
* @throws GeneralException
*/
@SourceMonitored
@SuppressWarnings("unchecked")
public static Object simpleTypeConvert(Object obj, String type, String format, TimeZone timeZone, Locale locale, boolean noTypeFail) throws GeneralException {
    // Null value, no target type, or target Object: nothing to convert.
    if (obj == null || UtilValidate.isEmpty(type) || "Object".equals(type) || "java.lang.Object".equals(type)) {
        return obj;
    }
    // "PlainString" is a pseudo-type meaning toString() with no converter lookup.
    if ("PlainString".equals(type)) {
        return obj.toString();
    }
    // DOM nodes are converted via their text content.
    if (obj instanceof Node) {
        Node node = (Node) obj;
        String nodeValue = node.getTextContent();
        if ("String".equals(type) || "java.lang.String".equals(type)) {
            return nodeValue;
        } else {
            return simpleTypeConvert(nodeValue, type, format, timeZone, locale, noTypeFail);
        }
    }
    // Strip generic type parameters, e.g. "List<String>" -> "List".
    int genericsStart = type.indexOf("<");
    if (genericsStart != -1) {
        type = type.substring(0, genericsStart);
    }
    Class<?> sourceClass = obj.getClass();
    Class<?> targetClass = null;
    try {
        targetClass = loadClass(type);
    } catch (ClassNotFoundException e) {
        throw new GeneralException("Conversion from " + sourceClass.getName() + " to " + type + " not currently supported", e);
    }
    // Already the requested type: return as-is.
    if (sourceClass.equals(targetClass)) {
        return obj;
    }
    // An empty String converts to null regardless of the target type.
    if (obj instanceof String && ((String) obj).length() == 0) {
        return null;
    }
    Converter<Object, Object> converter = null;
    try {
        converter = (Converter<Object, Object>) Converters.getConverter(sourceClass, targetClass);
    } catch (ClassNotFoundException e) {
        // Intentionally ignored: no converter registered for this source/target
        // pair; fall through to the String/no-op fallbacks below.
    }
    if (converter != null) {
        if (converter instanceof LocalizedConverter) {
            @SuppressWarnings("rawtypes")
            LocalizedConverter<Object, Object> localizedConverter = (LocalizedConverter) converter;
            // Substitute JVM defaults for missing locale/timezone context.
            if (timeZone == null) {
                timeZone = TimeZone.getDefault();
            }
            if (locale == null) {
                locale = Locale.getDefault();
            }
            try {
                return localizedConverter.convert(obj, locale, timeZone, format);
            } catch (ConversionException e) {
                Debug.logWarning(e, "Exception thrown while converting type: ", module);
                throw new GeneralException(e.getMessage(), e);
            }
        }
        try {
            return converter.convert(obj);
        } catch (ConversionException e) {
            Debug.logWarning(e, "Exception thrown while converting type: ", module);
            throw new GeneralException(e.getMessage(), e);
        }
    }
    // we can pretty much always do a conversion to a String, so do that here
    if (targetClass.equals(String.class)) {
        Debug.logWarning("No special conversion available for " + obj.getClass().getName() + " to String, returning object.toString().", module);
        return obj.toString();
    }
    if (noTypeFail) {
        throw new GeneralException("Conversion from " + obj.getClass().getName() + " to " + type + " not currently supported");
    } else {
        if (Debug.infoOn()) Debug.logInfo("No type conversion available for " + obj.getClass().getName() + " to " + targetClass.getName() + ", returning original object.", module);
        return obj;
    }
}
public static Object simpleTypeConvert(Object obj, String type, String format, Locale locale) throws GeneralException {
    // Delegate with noTypeFail = true: an unsupported conversion throws GeneralException.
    return simpleTypeConvert(obj, type, format, locale, true);
}
/**
 * Converts both values to the given type and compares them with the given operator.
 * Used by XML-driven comparisons (mini-language/screen conditions).
 * @param value1 Left-hand value
 * @param value2 Right-hand value
 * @param operator One of: less, greater, less-equals, greater-equals, equals,
 *        not-equals, contains, is-null, is-not-null, is-empty, is-not-empty
 * @param type Type to convert both values to before comparing ("PlainString" skips loading a class)
 * @param format Optional format string used during conversion
 * @param messages Collector for error descriptions; appended to when null is returned
 * @param locale Locale used for conversion of value1 (and value2 unless inline constant)
 * @param loader ClassLoader used to resolve the type name
 * @param value2InlineConstant if true, value2 is parsed with a fixed "en" locale
 * @return Boolean.TRUE/FALSE with the comparison result, or null on error
 *         (an explanation is added to messages)
 */
public static Boolean doRealCompare(Object value1, Object value2, String operator, String type, String format,
        List<Object> messages, Locale locale, ClassLoader loader, boolean value2InlineConstant) {
    boolean verboseOn = Debug.verboseOn();

    if (verboseOn) Debug.logVerbose("Comparing value1: \"" + value1 + "\" " + operator + " value2:\"" + value2 + "\"", module);

    // Normalize the type to a fully qualified class name; "PlainString" is kept as-is.
    try {
        if (!"PlainString".equals(type)) {
            Class<?> clz = loadClass(type, loader);
            type = clz.getName();
        }
    } catch (ClassNotFoundException e) {
        Debug.logWarning("The specified type [" + type + "] is not a valid class or a known special type, may see more errors later because of this: " + e.getMessage(), module);
    }

    if (value1 == null) {
        // some default behavior for null values, results in a bit cleaner operation
        if ("is-null".equals(operator)) {
            return Boolean.TRUE;
        } else if ("is-not-null".equals(operator)) {
            return Boolean.FALSE;
        } else if ("is-empty".equals(operator)) {
            return Boolean.TRUE;
        } else if ("is-not-empty".equals(operator)) {
            return Boolean.FALSE;
        } else if ("contains".equals(operator)) {
            return Boolean.FALSE;
        }
    }

    int result = 0;

    Object convertedValue2 = null;
    if (value2 != null) {
        // Inline constants in XML are always written in English, so parse them
        // with a fixed "en" locale regardless of the caller's locale.
        Locale value2Locale = locale;
        if (value2InlineConstant) {
            value2Locale = UtilMisc.parseLocale("en");
        }
        try {
            convertedValue2 = simpleTypeConvert(value2, type, format, value2Locale);
        } catch (GeneralException e) {
            Debug.logError(e, module);
            messages.add("Could not convert value2 for comparison: " + e.getMessage());
            return null;
        }
    }

    // have converted value 2, now before converting value 1 see if it is a Collection and we are doing a contains comparison
    if ("contains".equals(operator) && value1 instanceof Collection<?>) {
        Collection<?> col1 = (Collection<?>) value1;
        return col1.contains(convertedValue2) ? Boolean.TRUE : Boolean.FALSE;
    }

    Object convertedValue1 = null;
    try {
        convertedValue1 = simpleTypeConvert(value1, type, format, locale);
    } catch (GeneralException e) {
        Debug.logError(e, module);
        messages.add("Could not convert value1 for comparison: " + e.getMessage());
        return null;
    }

    // handle null values...
    if (convertedValue1 == null || convertedValue2 == null) {
        if ("equals".equals(operator)) {
            return convertedValue1 == null && convertedValue2 == null ? Boolean.TRUE : Boolean.FALSE;
        } else if ("not-equals".equals(operator)) {
            return convertedValue1 == null && convertedValue2 == null ? Boolean.FALSE : Boolean.TRUE;
        } else if ("is-not-empty".equals(operator) || "is-empty".equals(operator)) {
            // do nothing, handled later...
        } else {
            if (convertedValue1 == null) {
                messages.add("Left value is null, cannot complete compare for the operator " + operator);
                return null;
            }
            if (convertedValue2 == null) {
                messages.add("Right value is null, cannot complete compare for the operator " + operator);
                return null;
            }
        }
    }

    // Unary / containment operators are resolved here; the relational operators
    // below work off the int "result" computed per type.
    if ("contains".equals(operator)) {
        if ("java.lang.String".equals(type) || "PlainString".equals(type)) {
            String str1 = (String) convertedValue1;
            String str2 = (String) convertedValue2;

            return str1.indexOf(str2) < 0 ? Boolean.FALSE : Boolean.TRUE;
        } else {
            messages.add("Error in XML file: cannot do a contains compare between a String and a non-String type");
            return null;
        }
    } else if ("is-empty".equals(operator)) {
        if (convertedValue1 == null)
            return Boolean.TRUE;
        if (convertedValue1 instanceof String && ((String) convertedValue1).length() == 0)
            return Boolean.TRUE;
        if (convertedValue1 instanceof List<?> && ((List<?>) convertedValue1).size() == 0)
            return Boolean.TRUE;
        if (convertedValue1 instanceof Map<?, ?> && ((Map<?, ?>) convertedValue1).size() == 0)
            return Boolean.TRUE;
        return Boolean.FALSE;
    } else if ("is-not-empty".equals(operator)) {
        if (convertedValue1 == null)
            return Boolean.FALSE;
        if (convertedValue1 instanceof String && ((String) convertedValue1).length() == 0)
            return Boolean.FALSE;
        if (convertedValue1 instanceof List<?> && ((List<?>) convertedValue1).size() == 0)
            return Boolean.FALSE;
        if (convertedValue1 instanceof Map<?, ?> && ((Map<?, ?>) convertedValue1).size() == 0)
            return Boolean.FALSE;
        return Boolean.TRUE;
    }

    // Per-type comparison: sets "result" to a compareTo-style value (<0, 0, >0).
    if ("java.lang.String".equals(type) || "PlainString".equals(type)) {
        String str1 = (String) convertedValue1;
        String str2 = (String) convertedValue2;

        if (str1.length() == 0 || str2.length() == 0) {
            if ("equals".equals(operator)) {
                return str1.length() == 0 && str2.length() == 0 ? Boolean.TRUE : Boolean.FALSE;
            } else if ("not-equals".equals(operator)) {
                return str1.length() == 0 && str2.length() == 0 ? Boolean.FALSE : Boolean.TRUE;
            } else {
                messages.add("ERROR: Could not do a compare between strings with one empty string for the operator " + operator);
                return null;
            }
        }
        result = str1.compareTo(str2);
    } else if ("java.lang.Double".equals(type) || "java.lang.Float".equals(type) || "java.lang.Long".equals(type) || "java.lang.Integer".equals(type) || "java.math.BigDecimal".equals(type)) {
        // All numeric types are compared as double values; note this loses
        // precision for Long/BigDecimal values beyond 2^53.
        Number tempNum = (Number) convertedValue1;
        double value1Double = tempNum.doubleValue();

        tempNum = (Number) convertedValue2;
        double value2Double = tempNum.doubleValue();

        if (value1Double < value2Double)
            result = -1;
        else if (value1Double > value2Double)
            result = 1;
        else
            result = 0;
    } else if ("java.sql.Date".equals(type)) {
        java.sql.Date value1Date = (java.sql.Date) convertedValue1;
        java.sql.Date value2Date = (java.sql.Date) convertedValue2;
        result = value1Date.compareTo(value2Date);
    } else if ("java.sql.Time".equals(type)) {
        java.sql.Time value1Time = (java.sql.Time) convertedValue1;
        java.sql.Time value2Time = (java.sql.Time) convertedValue2;
        result = value1Time.compareTo(value2Time);
    } else if ("java.sql.Timestamp".equals(type)) {
        java.sql.Timestamp value1Timestamp = (java.sql.Timestamp) convertedValue1;
        java.sql.Timestamp value2Timestamp = (java.sql.Timestamp) convertedValue2;
        result = value1Timestamp.compareTo(value2Timestamp);
    } else if ("java.lang.Boolean".equals(type)) {
        Boolean value1Boolean = (Boolean) convertedValue1;
        Boolean value2Boolean = (Boolean) convertedValue2;
        if ("equals".equals(operator)) {
            if ((value1Boolean.booleanValue() && value2Boolean.booleanValue()) || (!value1Boolean.booleanValue() && !value2Boolean.booleanValue()))
                result = 0;
            else
                result = 1;
        } else if ("not-equals".equals(operator)) {
            if ((!value1Boolean.booleanValue() && value2Boolean.booleanValue()) || (value1Boolean.booleanValue() && !value2Boolean.booleanValue()))
                result = 0;
            else
                result = 1;
        } else {
            messages.add("Can only compare Booleans using the operators 'equals' or 'not-equals'");
            return null;
        }
    } else if ("java.lang.Object".equals(type)) {
        if (convertedValue1.equals(convertedValue2)) {
            result = 0;
        } else {
            result = 1;
        }
    } else {
        messages.add("Type \"" + type + "\" specified for compare not supported.");
        return null;
    }

    if (verboseOn) Debug.logVerbose("Got Compare result: " + result + ", operator: " + operator, module);
    // Map the compareTo-style "result" onto the requested relational operator;
    // any failing comparison returns FALSE immediately, otherwise TRUE below.
    if ("less".equals(operator)) {
        if (result >= 0)
            return Boolean.FALSE;
    } else if ("greater".equals(operator)) {
        if (result <= 0)
            return Boolean.FALSE;
    } else if ("less-equals".equals(operator)) {
        if (result > 0)
            return Boolean.FALSE;
    } else if ("greater-equals".equals(operator)) {
        if (result < 0)
            return Boolean.FALSE;
    } else if ("equals".equals(operator)) {
        if (result != 0)
            return Boolean.FALSE;
    } else if ("not-equals".equals(operator)) {
        if (result == 0)
            return Boolean.FALSE;
    } else {
        messages.add("Specified compare operator \"" + operator + "\" not known.");
        return null;
    }

    if (verboseOn) Debug.logVerbose("Returning true", module);
    return Boolean.TRUE;
}
/**
 * Returns true if the value is null or an "empty" String, Collection, Map,
 * CharSequence, or IsEmpty implementation. All other types (including 0 and
 * Boolean.FALSE) are considered non-empty.
 * @param value value to test, may be null
 * @return true if the value is null or empty
 */
@SuppressWarnings("unchecked")
public static boolean isEmpty(Object value) {
    if (value == null) return true;

    // Use the idiomatic isEmpty() checks instead of size()/length() == 0.
    if (value instanceof String) return ((String) value).isEmpty();
    if (value instanceof Collection) return ((Collection<? extends Object>) value).isEmpty();
    if (value instanceof Map) return ((Map<? extends Object, ? extends Object>) value).isEmpty();
    // CharSequence.isEmpty() only exists since Java 15, so keep length() here.
    if (value instanceof CharSequence) return ((CharSequence) value).length() == 0;
    if (value instanceof IsEmpty) return ((IsEmpty) value).isEmpty();

    // These types would flood the log
    // Number covers: BigDecimal, BigInteger, Byte, Double, Float, Integer, Long, Short
    if (value instanceof Boolean) return false;
    if (value instanceof Number) return false;
    if (value instanceof Character) return false;
    if (value instanceof java.util.Date) return false;

    if (Debug.verboseOn()) {
        Debug.logVerbose("In ObjectType.isEmpty(Object value) returning false for " + value.getClass() + " Object.", module);
    }
    return false;
}
@SuppressWarnings("serial")
public static final class NullObject implements Serializable {
    public NullObject() { }

    @Override
    public String toString() {
        return "ObjectType.NullObject";
    }

    @Override
    public int hashCode() {
        return toString().hashCode();
    }

    @Override
    public boolean equals(Object other) {
        // All NullObject instances are interchangeable: equality is by type only,
        // never by identity or state.
        return other instanceof NullObject;
    }
}
}
|
googleapis/google-cloud-java | 35,982 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/ExportAgentResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/agent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* The response message for
* [Agents.ExportAgent][google.cloud.dialogflow.cx.v3beta1.Agents.ExportAgent].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse}
*/
public final class ExportAgentResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse)
ExportAgentResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ExportAgentResponse.newBuilder() to construct.
// Builders funnel all construction through this constructor.
private ExportAgentResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private ExportAgentResponse() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Used by the protobuf runtime to create instances reflectively.
  return new ExportAgentResponse();
}
// Message descriptor generated from google/cloud/dialogflow/cx/v3beta1/agent.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dialogflow.cx.v3beta1.AgentProto
      .internal_static_google_cloud_dialogflow_cx_v3beta1_ExportAgentResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Binds the descriptor's fields to this class and its Builder for reflective access.
  return com.google.cloud.dialogflow.cx.v3beta1.AgentProto
      .internal_static_google_cloud_dialogflow_cx_v3beta1_ExportAgentResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse.class,
          com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse.Builder.class);
}
// Discriminator for the "agent" oneof; 0 means no field is set.
private int agentCase_ = 0;

@SuppressWarnings("serial")
// Holds whichever "agent" oneof value is set (String/ByteString for agent_uri, etc.).
private java.lang.Object agent_;
// Enumerates which field of the "agent" oneof is set; numbers match the proto field numbers.
public enum AgentCase
    implements
        com.google.protobuf.Internal.EnumLite,
        com.google.protobuf.AbstractMessage.InternalOneOfEnum {
  AGENT_URI(1),
  AGENT_CONTENT(2),
  COMMIT_SHA(3),
  AGENT_NOT_SET(0);

  private final int value;

  private AgentCase(int value) {
    this.value = value;
  }

  /**
   * @param value The number of the enum to look for.
   * @return The enum associated with the given number.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static AgentCase valueOf(int value) {
    return forNumber(value);
  }

  // Returns null (not an exception) for unknown field numbers.
  public static AgentCase forNumber(int value) {
    switch (value) {
      case 1:
        return AGENT_URI;
      case 2:
        return AGENT_CONTENT;
      case 3:
        return COMMIT_SHA;
      case 0:
        return AGENT_NOT_SET;
      default:
        return null;
    }
  }

  public int getNumber() {
    return this.value;
  }
};
// Identifies which field of the "agent" oneof is currently set.
public AgentCase getAgentCase() {
  return AgentCase.forNumber(agentCase_);
}
  public static final int AGENT_URI_FIELD_NUMBER = 1;

  /**
   *
   *
   * <pre>
   * The URI to a file containing the exported agent. This field is populated
   * if `agent_uri` is specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>string agent_uri = 1;</code>
   *
   * @return Whether the agentUri field is set.
   */
  public boolean hasAgentUri() {
    return agentCase_ == 1;
  }

  /**
   *
   *
   * <pre>
   * The URI to a file containing the exported agent. This field is populated
   * if `agent_uri` is specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>string agent_uri = 1;</code>
   *
   * @return The agentUri.
   */
  public java.lang.String getAgentUri() {
    java.lang.Object ref = "";
    if (agentCase_ == 1) {
      ref = agent_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field was parsed lazily as a ByteString; decode once and cache the String back
      // into the oneof slot so subsequent reads are free.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (agentCase_ == 1) {
        agent_ = s;
      }
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The URI to a file containing the exported agent. This field is populated
   * if `agent_uri` is specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>string agent_uri = 1;</code>
   *
   * @return The bytes for agentUri.
   */
  public com.google.protobuf.ByteString getAgentUriBytes() {
    java.lang.Object ref = "";
    if (agentCase_ == 1) {
      ref = agent_;
    }
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back into the oneof slot (mirror of getAgentUri()).
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (agentCase_ == 1) {
        agent_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int AGENT_CONTENT_FIELD_NUMBER = 2;

  /**
   *
   *
   * <pre>
   * Uncompressed raw byte content for agent. This field is populated
   * if none of `agent_uri` and `git_destination` are specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>bytes agent_content = 2;</code>
   *
   * @return Whether the agentContent field is set.
   */
  @java.lang.Override
  public boolean hasAgentContent() {
    return agentCase_ == 2;
  }

  /**
   *
   *
   * <pre>
   * Uncompressed raw byte content for agent. This field is populated
   * if none of `agent_uri` and `git_destination` are specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>bytes agent_content = 2;</code>
   *
   * @return The agentContent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getAgentContent() {
    if (agentCase_ == 2) {
      return (com.google.protobuf.ByteString) agent_;
    }
    // Oneof not set to this field: return the proto3 default for bytes.
    return com.google.protobuf.ByteString.EMPTY;
  }
  public static final int COMMIT_SHA_FIELD_NUMBER = 3;

  /**
   *
   *
   * <pre>
   * Commit SHA of the git push. This field is populated if
   * `git_destination` is specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>string commit_sha = 3;</code>
   *
   * @return Whether the commitSha field is set.
   */
  public boolean hasCommitSha() {
    return agentCase_ == 3;
  }

  /**
   *
   *
   * <pre>
   * Commit SHA of the git push. This field is populated if
   * `git_destination` is specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>string commit_sha = 3;</code>
   *
   * @return The commitSha.
   */
  public java.lang.String getCommitSha() {
    java.lang.Object ref = "";
    if (agentCase_ == 3) {
      ref = agent_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the parsed ByteString and cache the String back into the oneof slot.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (agentCase_ == 3) {
        agent_ = s;
      }
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Commit SHA of the git push. This field is populated if
   * `git_destination` is specified in
   * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
   * </pre>
   *
   * <code>string commit_sha = 3;</code>
   *
   * @return The bytes for commitSha.
   */
  public com.google.protobuf.ByteString getCommitShaBytes() {
    java.lang.Object ref = "";
    if (agentCase_ == 3) {
      ref = agent_;
    }
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back into the oneof slot (mirror of getCommitSha()).
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (agentCase_ == 3) {
        agent_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so initialization always succeeds.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes exactly the oneof member that is set (field numbers 1-3), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (agentCase_ == 1) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, agent_);
    }
    if (agentCase_ == 2) {
      output.writeBytes(2, (com.google.protobuf.ByteString) agent_);
    }
    if (agentCase_ == 3) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, agent_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the wire size; must mirror writeTo() field-for-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (agentCase_ == 1) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, agent_);
    }
    if (agentCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBytesSize(
              2, (com.google.protobuf.ByteString) agent_);
    }
    if (agentCase_ == 3) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, agent_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: same oneof case, same value for that case, same unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse other =
        (com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse) obj;
    if (!getAgentCase().equals(other.getAgentCase())) return false;
    switch (agentCase_) {
      case 1:
        if (!getAgentUri().equals(other.getAgentUri())) return false;
        break;
      case 2:
        if (!getAgentContent().equals(other.getAgentContent())) return false;
        break;
      case 3:
        if (!getCommitSha().equals(other.getCommitSha())) return false;
        break;
      case 0:
      default:
        // Oneof unset: nothing further to compare.
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals(): descriptor, set oneof member, unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (agentCase_) {
      case 1:
        hash = (37 * hash) + AGENT_URI_FIELD_NUMBER;
        hash = (53 * hash) + getAgentUri().hashCode();
        break;
      case 2:
        hash = (37 * hash) + AGENT_CONTENT_FIELD_NUMBER;
        hash = (53 * hash) + getAgentContent().hashCode();
        break;
      case 3:
        hash = (37 * hash) + COMMIT_SHA_FIELD_NUMBER;
        hash = (53 * hash) + getCommitSha().hashCode();
        break;
      case 0:
      default:
        // Oneof unset: no field contribution to the hash.
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite. All delegate to PARSER.
  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Creates a new, empty builder for this message type. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Creates a builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(
      com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // Converting the default instance yields an empty builder without a merge pass.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The response message for
   * [Agents.ExportAgent][google.cloud.dialogflow.cx.v3beta1.Agents.ExportAgent].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse)
      com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3beta1.AgentProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_ExportAgentResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3beta1.AgentProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_ExportAgentResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse.class,
              com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all builder state, including the oneof case and value.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      agentCase_ = 0;
      agent_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.AgentProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_ExportAgentResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse build() {
      com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse buildPartial() {
      com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse result =
          new com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Generator boilerplate: this message has no non-oneof fields, so the bitfield
    // snapshot below is intentionally unused.
    private void buildPartial0(com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse result) {
      int from_bitField0_ = bitField0_;
    }

    // Copies the oneof case and value into the message being built.
    private void buildPartialOneofs(
        com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse result) {
      result.agentCase_ = agentCase_;
      result.agent_ = this.agent_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse other) {
      if (other == com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse.getDefaultInstance())
        return this;
      switch (other.getAgentCase()) {
        case AGENT_URI:
          {
            // String oneofs copy the raw slot directly to preserve the lazily-decoded
            // String/ByteString representation without forcing a conversion.
            agentCase_ = 1;
            agent_ = other.agent_;
            onChanged();
            break;
          }
        case AGENT_CONTENT:
          {
            setAgentContent(other.getAgentContent());
            break;
          }
        case COMMIT_SHA:
          {
            agentCase_ = 3;
            agent_ = other.agent_;
            onChanged();
            break;
          }
        case AGENT_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: dispatches on the wire tag (field number << 3 | wire type);
    // tag 0 means end of input, unknown tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                java.lang.String s = input.readStringRequireUtf8();
                agentCase_ = 1;
                agent_ = s;
                break;
              } // case 10
            case 18:
              {
                agent_ = input.readBytes();
                agentCase_ = 2;
                break;
              } // case 18
            case 26:
              {
                java.lang.String s = input.readStringRequireUtf8();
                agentCase_ = 3;
                agent_ = s;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder-side oneof state; mirrors the message fields of the same names.
    private int agentCase_ = 0;
    private java.lang.Object agent_;

    public AgentCase getAgentCase() {
      return AgentCase.forNumber(agentCase_);
    }

    /** Clears whichever {@code agent} oneof field is currently set. */
    public Builder clearAgent() {
      agentCase_ = 0;
      agent_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    /**
     *
     *
     * <pre>
     * The URI to a file containing the exported agent. This field is populated
     * if `agent_uri` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string agent_uri = 1;</code>
     *
     * @return Whether the agentUri field is set.
     */
    @java.lang.Override
    public boolean hasAgentUri() {
      return agentCase_ == 1;
    }

    /**
     *
     *
     * <pre>
     * The URI to a file containing the exported agent. This field is populated
     * if `agent_uri` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string agent_uri = 1;</code>
     *
     * @return The agentUri.
     */
    @java.lang.Override
    public java.lang.String getAgentUri() {
      java.lang.Object ref = "";
      if (agentCase_ == 1) {
        ref = agent_;
      }
      if (!(ref instanceof java.lang.String)) {
        // Decode the lazily-parsed ByteString once and cache the String back.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (agentCase_ == 1) {
          agent_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The URI to a file containing the exported agent. This field is populated
     * if `agent_uri` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string agent_uri = 1;</code>
     *
     * @return The bytes for agentUri.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getAgentUriBytes() {
      java.lang.Object ref = "";
      if (agentCase_ == 1) {
        ref = agent_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (agentCase_ == 1) {
          agent_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The URI to a file containing the exported agent. This field is populated
     * if `agent_uri` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string agent_uri = 1;</code>
     *
     * @param value The agentUri to set.
     * @return This builder for chaining.
     */
    public Builder setAgentUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      agentCase_ = 1;
      agent_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The URI to a file containing the exported agent. This field is populated
     * if `agent_uri` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string agent_uri = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAgentUri() {
      // Only clears if this oneof member is the one currently set.
      if (agentCase_ == 1) {
        agentCase_ = 0;
        agent_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The URI to a file containing the exported agent. This field is populated
     * if `agent_uri` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string agent_uri = 1;</code>
     *
     * @param value The bytes for agentUri to set.
     * @return This builder for chaining.
     */
    public Builder setAgentUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      agentCase_ = 1;
      agent_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Uncompressed raw byte content for agent. This field is populated
     * if none of `agent_uri` and `git_destination` are specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>bytes agent_content = 2;</code>
     *
     * @return Whether the agentContent field is set.
     */
    public boolean hasAgentContent() {
      return agentCase_ == 2;
    }

    /**
     *
     *
     * <pre>
     * Uncompressed raw byte content for agent. This field is populated
     * if none of `agent_uri` and `git_destination` are specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>bytes agent_content = 2;</code>
     *
     * @return The agentContent.
     */
    public com.google.protobuf.ByteString getAgentContent() {
      if (agentCase_ == 2) {
        return (com.google.protobuf.ByteString) agent_;
      }
      return com.google.protobuf.ByteString.EMPTY;
    }

    /**
     *
     *
     * <pre>
     * Uncompressed raw byte content for agent. This field is populated
     * if none of `agent_uri` and `git_destination` are specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>bytes agent_content = 2;</code>
     *
     * @param value The agentContent to set.
     * @return This builder for chaining.
     */
    public Builder setAgentContent(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      agentCase_ = 2;
      agent_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Uncompressed raw byte content for agent. This field is populated
     * if none of `agent_uri` and `git_destination` are specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>bytes agent_content = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAgentContent() {
      if (agentCase_ == 2) {
        agentCase_ = 0;
        agent_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Commit SHA of the git push. This field is populated if
     * `git_destination` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string commit_sha = 3;</code>
     *
     * @return Whether the commitSha field is set.
     */
    @java.lang.Override
    public boolean hasCommitSha() {
      return agentCase_ == 3;
    }

    /**
     *
     *
     * <pre>
     * Commit SHA of the git push. This field is populated if
     * `git_destination` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string commit_sha = 3;</code>
     *
     * @return The commitSha.
     */
    @java.lang.Override
    public java.lang.String getCommitSha() {
      java.lang.Object ref = "";
      if (agentCase_ == 3) {
        ref = agent_;
      }
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (agentCase_ == 3) {
          agent_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Commit SHA of the git push. This field is populated if
     * `git_destination` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string commit_sha = 3;</code>
     *
     * @return The bytes for commitSha.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getCommitShaBytes() {
      java.lang.Object ref = "";
      if (agentCase_ == 3) {
        ref = agent_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (agentCase_ == 3) {
          agent_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Commit SHA of the git push. This field is populated if
     * `git_destination` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string commit_sha = 3;</code>
     *
     * @param value The commitSha to set.
     * @return This builder for chaining.
     */
    public Builder setCommitSha(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      agentCase_ = 3;
      agent_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Commit SHA of the git push. This field is populated if
     * `git_destination` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string commit_sha = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearCommitSha() {
      if (agentCase_ == 3) {
        agentCase_ = 0;
        agent_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Commit SHA of the git push. This field is populated if
     * `git_destination` is specified in
     * [ExportAgentRequest][google.cloud.dialogflow.cx.v3beta1.ExportAgentRequest].
     * </pre>
     *
     * <code>string commit_sha = 3;</code>
     *
     * @param value The bytes for commitSha to set.
     * @return This builder for chaining.
     */
    public Builder setCommitShaBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      agentCase_ = 3;
      agent_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse();
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser; delegates to the Builder's streaming mergeFrom and attaches any
  // partially-built message to the exception on failure.
  private static final com.google.protobuf.Parser<ExportAgentResponse> PARSER =
      new com.google.protobuf.AbstractParser<ExportAgentResponse>() {
        @java.lang.Override
        public ExportAgentResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ExportAgentResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ExportAgentResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.ExportAgentResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,012 | java-artifact-registry/proto-google-cloud-artifact-registry-v1beta2/src/main/java/com/google/devtools/artifactregistry/v1beta2/ListFilesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1beta2/file.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1beta2;
/**
*
*
* <pre>
* The response from listing files.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListFilesResponse}
*/
public final class ListFilesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.ListFilesResponse)
ListFilesResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListFilesResponse.newBuilder() to construct.
  private ListFilesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor exists only for newInstance(); fields get proto3 defaults.
  private ListFilesResponse() {
    files_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Invoked reflectively by the protobuf runtime to allocate instances without a Builder.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListFilesResponse();
  }
  // Proto descriptor for this message type, held by the generated FileProto outer class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.artifactregistry.v1beta2.FileProto
        .internal_static_google_devtools_artifactregistry_v1beta2_ListFilesResponse_descriptor;
  }
  // Wires the reflective field accessors used by the protobuf runtime to this class and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.artifactregistry.v1beta2.FileProto
        .internal_static_google_devtools_artifactregistry_v1beta2_ListFilesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.artifactregistry.v1beta2.ListFilesResponse.class,
            com.google.devtools.artifactregistry.v1beta2.ListFilesResponse.Builder.class);
  }
  public static final int FILES_FIELD_NUMBER = 1;

  // Backing list for the repeated `files` field; immutable once the message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.devtools.artifactregistry.v1beta2.File> files_;

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.devtools.artifactregistry.v1beta2.File> getFilesList() {
    return files_;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.FileOrBuilder>
      getFilesOrBuilderList() {
    return files_;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  @java.lang.Override
  public int getFilesCount() {
    return files_.size();
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.File getFiles(int index) {
    return files_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.FileOrBuilder getFilesOrBuilder(int index) {
    return files_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or a lazily-parsed ByteString; decoded on first String access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of files, or empty if there are no
   * more files to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the lazily-parsed ByteString once and cache the String back.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of files, or empty if there are no
   * more files to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back (mirror of getNextPageToken()).
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
// -1 = not computed yet, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Serialize fields in field-number order: files (1), then next_page_token (2).
  for (int i = 0; i < files_.size(); i++) {
    output.writeMessage(1, files_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Preserve unknown fields round-tripped from parsing.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the computed size; -1 means not computed yet.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < files_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, files_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.devtools.artifactregistry.v1beta2.ListFilesResponse)) {
    return super.equals(obj);
  }
  com.google.devtools.artifactregistry.v1beta2.ListFilesResponse other =
      (com.google.devtools.artifactregistry.v1beta2.ListFilesResponse) obj;
  // Field-by-field comparison, including unknown fields, per protobuf equality semantics.
  if (!getFilesList().equals(other.getFilesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Standard generated hash: seed with the descriptor, then mix each set field.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getFilesCount() > 0) {
    hash = (37 * hash) + FILES_FIELD_NUMBER;
    hash = (53 * hash) + getFilesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// --- Standard generated parseFrom/parseDelimitedFrom overloads, all delegating to PARSER. ---
public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

// Stream variants wrap I/O failures via parseWithIOException.
public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length prefix before the message payload.
public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// --- Standard generated builder factories. ---
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated from an existing message.
public static Builder newBuilder(
    com.google.devtools.artifactregistry.v1beta2.ListFilesResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid a redundant merge when converting the (empty) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * The response from listing files.
 * </pre>
 *
 * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListFilesResponse}
 */
// NOTE: generated builder. bitField0_ tracks which fields were explicitly set:
// bit 0x00000001 = files (also marks list mutability), bit 0x00000002 = next_page_token.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.ListFilesResponse)
    com.google.devtools.artifactregistry.v1beta2.ListFilesResponseOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.artifactregistry.v1beta2.FileProto
        .internal_static_google_devtools_artifactregistry_v1beta2_ListFilesResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.artifactregistry.v1beta2.FileProto
        .internal_static_google_devtools_artifactregistry_v1beta2_ListFilesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.artifactregistry.v1beta2.ListFilesResponse.class,
            com.google.devtools.artifactregistry.v1beta2.ListFilesResponse.Builder.class);
  }

  // Construct using com.google.devtools.artifactregistry.v1beta2.ListFilesResponse.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (filesBuilder_ == null) {
      files_ = java.util.Collections.emptyList();
    } else {
      // Once a field builder exists it owns the elements; drop the local list.
      files_ = null;
      filesBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    nextPageToken_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.devtools.artifactregistry.v1beta2.FileProto
        .internal_static_google_devtools_artifactregistry_v1beta2_ListFilesResponse_descriptor;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.ListFilesResponse
      getDefaultInstanceForType() {
    return com.google.devtools.artifactregistry.v1beta2.ListFilesResponse.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.ListFilesResponse build() {
    com.google.devtools.artifactregistry.v1beta2.ListFilesResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.ListFilesResponse buildPartial() {
    com.google.devtools.artifactregistry.v1beta2.ListFilesResponse result =
        new com.google.devtools.artifactregistry.v1beta2.ListFilesResponse(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Transfers the repeated 'files' field into the message, freezing the local list if needed.
  private void buildPartialRepeatedFields(
      com.google.devtools.artifactregistry.v1beta2.ListFilesResponse result) {
    if (filesBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        files_ = java.util.Collections.unmodifiableList(files_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.files_ = files_;
    } else {
      result.files_ = filesBuilder_.build();
    }
  }

  // Copies scalar fields that were explicitly set (per bitField0_) into the message.
  private void buildPartial0(
      com.google.devtools.artifactregistry.v1beta2.ListFilesResponse result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.nextPageToken_ = nextPageToken_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.devtools.artifactregistry.v1beta2.ListFilesResponse) {
      return mergeFrom((com.google.devtools.artifactregistry.v1beta2.ListFilesResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.devtools.artifactregistry.v1beta2.ListFilesResponse other) {
    if (other
        == com.google.devtools.artifactregistry.v1beta2.ListFilesResponse.getDefaultInstance())
      return this;
    if (filesBuilder_ == null) {
      if (!other.files_.isEmpty()) {
        if (files_.isEmpty()) {
          // Adopt the other message's (immutable) list directly to avoid a copy.
          files_ = other.files_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureFilesIsMutable();
          files_.addAll(other.files_);
        }
        onChanged();
      }
    } else {
      if (!other.files_.isEmpty()) {
        if (filesBuilder_.isEmpty()) {
          // Reset the builder so the adopted immutable list can be used as-is.
          filesBuilder_.dispose();
          filesBuilder_ = null;
          files_ = other.files_;
          bitField0_ = (bitField0_ & ~0x00000001);
          filesBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getFilesFieldBuilder()
                  : null;
        } else {
          filesBuilder_.addAllMessages(other.files_);
        }
      }
    }
    if (!other.getNextPageToken().isEmpty()) {
      nextPageToken_ = other.nextPageToken_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      // Tag-dispatch loop over the wire format; tag 0 marks end of input.
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              // Field 1 (files), wire type 2: length-delimited File message.
              com.google.devtools.artifactregistry.v1beta2.File m =
                  input.readMessage(
                      com.google.devtools.artifactregistry.v1beta2.File.parser(),
                      extensionRegistry);
              if (filesBuilder_ == null) {
                ensureFilesIsMutable();
                files_.add(m);
              } else {
                filesBuilder_.addMessage(m);
              }
              break;
            } // case 10
          case 18:
            {
              // Field 2 (next_page_token), wire type 2: UTF-8 string.
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private java.util.List<com.google.devtools.artifactregistry.v1beta2.File> files_ =
      java.util.Collections.emptyList();

  // Copies files_ into a mutable ArrayList on first write (copy-on-write for adopted lists).
  private void ensureFilesIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      files_ = new java.util.ArrayList<com.google.devtools.artifactregistry.v1beta2.File>(files_);
      bitField0_ |= 0x00000001;
    }
  }

  // Lazily-created nested-builder support for 'files'; null until a builder accessor is used.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.devtools.artifactregistry.v1beta2.File,
          com.google.devtools.artifactregistry.v1beta2.File.Builder,
          com.google.devtools.artifactregistry.v1beta2.FileOrBuilder>
      filesBuilder_;

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public java.util.List<com.google.devtools.artifactregistry.v1beta2.File> getFilesList() {
    if (filesBuilder_ == null) {
      return java.util.Collections.unmodifiableList(files_);
    } else {
      return filesBuilder_.getMessageList();
    }
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public int getFilesCount() {
    if (filesBuilder_ == null) {
      return files_.size();
    } else {
      return filesBuilder_.getCount();
    }
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public com.google.devtools.artifactregistry.v1beta2.File getFiles(int index) {
    if (filesBuilder_ == null) {
      return files_.get(index);
    } else {
      return filesBuilder_.getMessage(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder setFiles(int index, com.google.devtools.artifactregistry.v1beta2.File value) {
    if (filesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFilesIsMutable();
      files_.set(index, value);
      onChanged();
    } else {
      filesBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder setFiles(
      int index, com.google.devtools.artifactregistry.v1beta2.File.Builder builderForValue) {
    if (filesBuilder_ == null) {
      ensureFilesIsMutable();
      files_.set(index, builderForValue.build());
      onChanged();
    } else {
      filesBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder addFiles(com.google.devtools.artifactregistry.v1beta2.File value) {
    if (filesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFilesIsMutable();
      files_.add(value);
      onChanged();
    } else {
      filesBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder addFiles(int index, com.google.devtools.artifactregistry.v1beta2.File value) {
    if (filesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFilesIsMutable();
      files_.add(index, value);
      onChanged();
    } else {
      filesBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder addFiles(
      com.google.devtools.artifactregistry.v1beta2.File.Builder builderForValue) {
    if (filesBuilder_ == null) {
      ensureFilesIsMutable();
      files_.add(builderForValue.build());
      onChanged();
    } else {
      filesBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder addFiles(
      int index, com.google.devtools.artifactregistry.v1beta2.File.Builder builderForValue) {
    if (filesBuilder_ == null) {
      ensureFilesIsMutable();
      files_.add(index, builderForValue.build());
      onChanged();
    } else {
      filesBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder addAllFiles(
      java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1beta2.File> values) {
    if (filesBuilder_ == null) {
      ensureFilesIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, files_);
      onChanged();
    } else {
      filesBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder clearFiles() {
    if (filesBuilder_ == null) {
      files_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      filesBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public Builder removeFiles(int index) {
    if (filesBuilder_ == null) {
      ensureFilesIsMutable();
      files_.remove(index);
      onChanged();
    } else {
      filesBuilder_.remove(index);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public com.google.devtools.artifactregistry.v1beta2.File.Builder getFilesBuilder(int index) {
    return getFilesFieldBuilder().getBuilder(index);
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public com.google.devtools.artifactregistry.v1beta2.FileOrBuilder getFilesOrBuilder(int index) {
    if (filesBuilder_ == null) {
      return files_.get(index);
    } else {
      return filesBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.FileOrBuilder>
      getFilesOrBuilderList() {
    if (filesBuilder_ != null) {
      return filesBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(files_);
    }
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public com.google.devtools.artifactregistry.v1beta2.File.Builder addFilesBuilder() {
    return getFilesFieldBuilder()
        .addBuilder(com.google.devtools.artifactregistry.v1beta2.File.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public com.google.devtools.artifactregistry.v1beta2.File.Builder addFilesBuilder(int index) {
    return getFilesFieldBuilder()
        .addBuilder(
            index, com.google.devtools.artifactregistry.v1beta2.File.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The files returned.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1beta2.File files = 1;</code>
   */
  public java.util.List<com.google.devtools.artifactregistry.v1beta2.File.Builder>
      getFilesBuilderList() {
    return getFilesFieldBuilder().getBuilderList();
  }

  // Creates the field builder on first use, handing over the current list and its mutability bit.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.devtools.artifactregistry.v1beta2.File,
          com.google.devtools.artifactregistry.v1beta2.File.Builder,
          com.google.devtools.artifactregistry.v1beta2.FileOrBuilder>
      getFilesFieldBuilder() {
    if (filesBuilder_ == null) {
      filesBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.devtools.artifactregistry.v1beta2.File,
              com.google.devtools.artifactregistry.v1beta2.File.Builder,
              com.google.devtools.artifactregistry.v1beta2.FileOrBuilder>(
              files_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
      files_ = null;
    }
    return filesBuilder_;
  }

  // String or ByteString, same lazy-decode scheme as the message field.
  private java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of files, or empty if there are no
   * more files to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of files, or empty if there are no
   * more files to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of files, or empty if there are no
   * more files to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of files, or empty if there are no
   * more files to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearNextPageToken() {
    nextPageToken_ = getDefaultInstance().getNextPageToken();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of files, or empty if there are no
   * more files to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The bytes for nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.ListFilesResponse)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.ListFilesResponse)
// Singleton default instance shared by all callers; created eagerly at class load.
private static final com.google.devtools.artifactregistry.v1beta2.ListFilesResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.ListFilesResponse();
}

public static com.google.devtools.artifactregistry.v1beta2.ListFilesResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser implementation that funnels all parse paths through Builder.mergeFrom, always
// attaching the partially-built message to parse failures for diagnostics.
private static final com.google.protobuf.Parser<ListFilesResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListFilesResponse>() {
      @java.lang.Override
      public ListFilesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<ListFilesResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListFilesResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.ListFilesResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/lens | 35,918 | lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.driver.jdbc;
import static org.apache.lens.driver.jdbc.JDBCDriverConfConstants.*;
import static org.apache.lens.driver.jdbc.JDBCDriverConfConstants.ConnectionPoolProperties.*;
import static org.apache.lens.server.api.LensConfConstants.DRIVER_COST_TYPE_RANGES;
import static org.testng.Assert.*;
import java.sql.*;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.lens.api.LensConf;
import org.apache.lens.api.query.QueryCostType;
import org.apache.lens.api.query.QueryHandle;
import org.apache.lens.api.query.ResultRow;
import org.apache.lens.server.api.LensConfConstants;
import org.apache.lens.server.api.driver.*;
import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
import org.apache.lens.server.api.error.LensException;
import org.apache.lens.server.api.metrics.LensMetricsRegistry;
import org.apache.lens.server.api.query.ExplainQueryContext;
import org.apache.lens.server.api.query.PreparedQueryContext;
import org.apache.lens.server.api.query.QueryContext;
import org.apache.lens.server.api.query.cost.QueryCost;
import org.apache.lens.server.api.query.cost.StaticQueryCost;
import org.apache.lens.server.api.util.LensUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.ColumnDescriptor;
import org.testng.Assert;
import org.testng.annotations.*;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.Lists;
import com.mchange.v2.c3p0.ComboPooledDataSource;
import lombok.extern.slf4j.Slf4j;
/**
* The Class TestJdbcDriver.
*/
@Slf4j
public class TestJdbcDriver {
/** The base conf. */
Configuration baseConf;

// Hive conf wrapping baseConf; initialized in testCreateJdbcDriver().
HiveConf hConf;

/** The driver. */
JDBCDriver driver;

// Single-element driver list handed to every query/explain context.
Collection<LensDriver> drivers;

// Cost the configured JDBC driver is expected to report from estimate() in these tests.
static final StaticQueryCost JDBC_COST = new StaticQueryCost(0.0, QueryCostType.LOW);
/**
 * Test create jdbc driver. Runs once before all tests: configures an in-memory HSQLDB
 * instance and constructs the shared {@link JDBCDriver}.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void testCreateJdbcDriver() throws Exception {
  baseConf = new Configuration();
  // In-memory HSQLDB database shared by every test in this class.
  baseConf.set(JDBC_DRIVER_CLASS, "org.hsqldb.jdbc.JDBCDriver");
  baseConf.set(JDBC_DB_URI, "jdbc:hsqldb:mem:jdbcTestDB");
  baseConf.set(JDBC_USER, "SA");
  baseConf.set(JDBC_PASSWORD, "");
  baseConf.set(JDBC_EXPLAIN_KEYWORD_PARAM, "explain plan for ");
  // Cost-to-type mapping used when asserting against JDBC_COST.
  baseConf.set(DRIVER_COST_TYPE_RANGES, "VERY_LOW,0.0,LOW,0.001,HIGH");
  hConf = new HiveConf(baseConf, this.getClass());
  driver = new JDBCDriver();
  driver.configure(baseConf, "jdbc", "jdbc1");
  assertNotNull(driver);
  assertTrue(driver.configured);
  drivers = Lists.<LensDriver>newArrayList(driver);
}
/**
 * Close. Shuts down the shared driver (and its connection pools) after all tests.
 *
 * @throws Exception the exception
 */
@AfterTest
public void close() throws Exception {
  driver.close();
}
// Ensures a Hive SessionState exists for the current thread before each test method.
@BeforeMethod
public void beforeMethod() throws Exception {
  if (SessionState.get() == null) {
    SessionState.start(new HiveConf(baseConf, TestJdbcDriver.class));
  }
}
// Convenience overload using the shared base configuration.
private QueryContext createQueryContext(final String query) throws LensException {
  return createQueryContext(query, baseConf);
}
/** Builds a {@link QueryContext} for {@code query} as user "SA" against the given conf. */
private QueryContext createQueryContext(final String query, Configuration conf) throws LensException {
  // Construct and return directly; no intermediate holder needed.
  return new QueryContext(query, "SA", new LensConf(), conf, drivers);
}
/** Builds an {@link ExplainQueryContext} with a fresh random handle for user "testuser". */
protected ExplainQueryContext createExplainContext(final String query, Configuration conf) {
  // No LensConf is supplied (null); the single shared driver list is reused.
  return new ExplainQueryContext(
      UUID.randomUUID().toString(), query, "testuser", null, conf, drivers);
}
/**
 * Creates the table. Uses a fresh connection from the driver.
 *
 * @param table the table
 * @throws Exception the exception
 */
synchronized void createTable(String table) throws Exception {
  createTable(table, null);
}
// Creates a single-column (ID INT) table on the given connection (or a new one if null).
synchronized void createTable(String table, Connection conn) throws Exception {
  runTestSetupQuery(conn, "CREATE TABLE " + table + " (ID INT)");
}
/**
 * Executes a single setup statement (DDL/DML) and commits.
 *
 * @param conn connection to use; if {@code null} a fresh connection is obtained from the
 *             driver. The connection is always closed on return (same contract as before).
 * @param query the SQL statement to execute
 * @throws Exception if obtaining the connection or executing the statement fails
 */
void runTestSetupQuery(Connection conn, String query) throws Exception {
  if (conn == null) {
    conn = driver.getConnection();
  }
  // try-with-resources closes stmt then conn even when close() itself throws; the previous
  // finally block could leak conn if stmt.close() failed first.
  try (Connection c = conn;
       Statement stmt = c.createStatement()) {
    stmt.execute(query);
    c.commit();
  }
}
// Inserts the default 10 rows into the table using a fresh driver connection.
void insertData(String table) throws Exception {
  insertData(table, null);
}
// Inserts the default 10 rows using the given connection (or a new one if null).
void insertData(String table, Connection conn) throws Exception {
  insertData(table, conn, 10);
}
/**
 * Insert data. Inserts {@code numRows} sequential integer rows (0..numRows-1) into
 * {@code table} and commits.
 *
 * @param conn connection to use; if {@code null} a fresh connection is obtained from the
 *             driver. The connection is always closed on return (same contract as before).
 * @param table the table
 * @param numRows number of rows to insert
 * @throws Exception the exception
 */
void insertData(String table, Connection conn, int numRows) throws Exception {
  if (conn == null) {
    conn = driver.getConnection();
  }
  // try-with-resources closes stmt then conn even when close() itself throws; the previous
  // finally block could leak conn if stmt.close() failed first.
  try (Connection c = conn;
       PreparedStatement stmt = c.prepareStatement("INSERT INTO " + table + " VALUES(?)")) {
    for (int i = 0; i < numRows; i++) {
      stmt.setInt(1, i);
      stmt.executeUpdate();
    }
    c.commit();
  }
}
/**
 * Test ddl queries. Verifies that the JDBC driver rejects DDL/DML statements at rewrite
 * time: DROP, CREATE TABLE, INSERT OVERWRITE, and CTAS must all fail.
 */
@Test
public void testDDLQueries() {
  assertRewriteFails("DROP TABLE TEMP");
  assertRewriteFails("create table temp(name string, msr int)");
  assertRewriteFails("insert overwrite table temp SELECT * FROM execute_test");
  assertRewriteFails("create table temp2 as SELECT * FROM execute_test");
}

/** Asserts that rewriting the given non-SELECT query throws a {@link LensException}. */
private void assertRewriteFails(String query) {
  Throwable th = null;
  try {
    driver.rewriteQuery(createQueryContext(query));
  } catch (LensException e) {
    log.error("Error running DDL query: {}", query, e);
    th = e;
  }
  Assert.assertNotNull(th, "Expected rewrite to fail for: " + query);
}
/**
 * Test estimate. Verifies estimate() returns the expected static cost, rewrites the driver
 * query, and — by looping past the estimate pool size — that failed estimates do not leak
 * pooled connections.
 *
 * @throws Exception the exception
 */
@Test
public void testEstimate() throws Exception {
  createTable("estimate_test", driver.getEstimateConnection()); // Create table
  insertData("estimate_test", driver.getEstimateConnection()); // Insert some data into table
  String query1 = "SELECT * FROM estimate_test"; // Select query against existing table
  ExplainQueryContext ctx = createExplainContext(query1, baseConf);
  Assert.assertNull(ctx.getFinalDriverQuery(driver));
  QueryCost cost = driver.estimate(ctx);
  Assert.assertEquals(cost, JDBC_COST);
  // estimate() must also populate the rewritten driver query on the context.
  Assert.assertNotNull(ctx.getFinalDriverQuery(driver));
  // Test connection leak for estimate
  final int maxEstimateConnections =
      driver.getEstimateConnectionConf().getInt(JDBC_POOL_MAX_SIZE.getConfigKey(), 50);
  // Issue more estimates than the pool allows. Early iterations use a DDL statement that is
  // expected to fail; if failures leaked connections, later iterations would exhaust the pool
  // and surface an SQLException in the cause chain — which the assert below forbids.
  for (int i = 0; i < maxEstimateConnections + 10; i++) {
    try {
      log.info("Iteration#{}", (i + 1));
      // NOTE(review): condition is i > maxEstimateConnections (not >=), so the iteration at
      // i == maxEstimateConnections still issues the failing DDL — presumably intentional;
      // confirm against the driver's pool semantics.
      String query = i > maxEstimateConnections ? "SELECT * FROM estimate_test" : "CREATE TABLE FOO(ID INT)";
      ExplainQueryContext context = createExplainContext(query, baseConf);
      cost = driver.estimate(context);
    } catch (LensException exc) {
      // Estimate may fail for the DDL query, but never because of pool exhaustion.
      Throwable th = exc.getCause();
      while (th != null) {
        assertFalse(th instanceof SQLException);
        th = th.getCause();
      }
    }
  }
}
/**
 * Estimate against a non-existent table must fail, surfacing the HSQLDB
 * "object not found" message through the LensException cause chain.
 *
 * @throws Exception the exception
 */
@Test
public void testEstimateFailing() throws Exception {
  // Table estimate_test2 is intentionally never created.
  final String badQuery = "SELECT * FROM estimate_test2";
  try {
    driver.estimate(createExplainContext(badQuery, baseConf));
    Assert.fail("Running estimate on a non existing table.");
  } catch (LensException ex) {
    Assert.assertEquals(LensUtil.getCauseMessage(ex),
      "user lacks privilege or object not found: ESTIMATE_TEST2");
  }
}
/**
 * Running estimate with a metric id configured must register the expected method-metric
 * gauges in the static metrics registry.
 * (The original javadoc said "Test estimate failing", a copy-paste leftover.)
 *
 * @throws Exception the exception
 */
@Test
public void testEstimateGauges() throws Exception {
  createTable("estimate_test_gauge", driver.getEstimateConnection());
  insertData("estimate_test_gauge", driver.getEstimateConnection());
  Configuration metricConf = new Configuration(baseConf);
  metricConf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, TestJdbcDriver.class.getSimpleName());
  // Estimating the query publishes the gauges as a side effect.
  driver.estimate(createExplainContext("SELECT * FROM estimate_test_gauge", metricConf));
  MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
  String qualifiedName = driver.getFullyQualifiedName();
  String prefix = "lens.MethodMetricGauge.TestJdbcDriver-" + qualifiedName + "-";
  Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
    prefix + "validate-columnar-sql-rewrite",
    prefix + "validate-jdbc-prepare-statement",
    prefix + "validate-thru-prepare",
    prefix + "jdbc-check-allowed-query")));
}
/**
 * Runs estimate followed by execute/prepare with query metrics enabled, mirroring the
 * server flow (estimate first, then execute or prepare on the same context).
 * The fixture table is created in both the estimate DB and the query DB, since the test
 * config uses separate pools.
 */
@Test
public void testMetricsEnabled() throws Exception {
  createTable("test_metrics", driver.getEstimateConnection()); // Create table
  insertData("test_metrics", driver.getEstimateConnection()); // Insert some data into table
  createTable("test_metrics"); // Create table
  insertData("test_metrics"); // Insert some data into table
  String query1 = "SELECT * FROM test_metrics"; // Select query against existing table
  Configuration metricConf = new Configuration(baseConf);
  metricConf.setBoolean(LensConfConstants.ENABLE_QUERY_METRICS, true);
  // run estimate and execute - because server would first run estimate and then execute with same context
  QueryContext ctx = createQueryContext(query1, metricConf);
  QueryCost cost = driver.estimate(ctx);
  Assert.assertEquals(cost, JDBC_COST);
  LensResultSet result = driver.execute(ctx);
  Assert.assertNotNull(result);
  // test prepare
  // run estimate and prepare - because server would first run estimate and then prepare with same context
  PreparedQueryContext pContext = new PreparedQueryContext(query1, "SA", metricConf, drivers);
  cost = driver.estimate(pContext);
  Assert.assertEquals(cost, JDBC_COST);
  driver.prepare(pContext);
  // test explain and prepare
  PreparedQueryContext pContext2 = new PreparedQueryContext(query1, "SA", metricConf, drivers);
  cost = driver.estimate(pContext2);
  Assert.assertEquals(cost, JDBC_COST);
  driver.prepare(pContext2);
  driver.explainAndPrepare(pContext2);
}
/**
 * Explain succeeds for an existing table (populating the final driver query) and fails
 * with a LensException for a missing one.
 *
 * @throws Exception the exception
 */
@Test
public void testExplain() throws Exception {
  createTable("explain_test");
  insertData("explain_test");
  final String goodQuery = "SELECT * FROM explain_test";
  final String badQuery = "SELECT * FROM explain_test1";
  ExplainQueryContext ctx = createExplainContext(goodQuery, baseConf);
  // The final driver query is only filled in by explain().
  Assert.assertNull(ctx.getFinalDriverQuery(driver));
  driver.explain(ctx);
  Assert.assertNotNull(ctx.getFinalDriverQuery(driver));
  try {
    driver.explain(createExplainContext(badQuery, baseConf));
    Assert.fail("Running explain on a non existing table.");
  } catch (LensException ex) {
    System.out.println("Error : " + ex);
  }
}
/**
 * Executes a simple SELECT synchronously and validates the in-memory result metadata
 * (single int column named ID) and that every row is readable.
 *
 * @throws Exception the exception
 */
@Test
public void testExecute() throws Exception {
  createTable("execute_test");
  // Insert some data into table
  insertData("execute_test");
  final String query = "SELECT * FROM execute_test";
  QueryContext context = createQueryContext(query);
  LensResultSet resultSet = driver.execute(context);
  assertNotNull(resultSet);
  if (resultSet instanceof InMemoryResultSet) {
    InMemoryResultSet rs = (InMemoryResultSet) resultSet;
    LensResultSetMetadata rsMeta = rs.getMetadata();
    assertEquals(rsMeta.getColumns().size(), 1);
    ColumnDescriptor col1 = rsMeta.getColumns().get(0);
    assertEquals(col1.getTypeName().toLowerCase(), "int");
    assertEquals(col1.getName(), "ID");
    // Drain the result set; previously rows were stored in an unused local.
    while (rs.hasNext()) {
      assertNotNull(rs.next().getValues());
    }
    if (rs instanceof JDBCResultSet) {
      ((JDBCResultSet) rs).close();
    }
  }
}
/**
 * Verifies MaxJDBCConnectionCheckConstraint: once the driver holds as many queries as the
 * connection pool's max size, new launches are disallowed until a query is closed.
 */
@Test
public void testJDBCMaxConnectionConstraintCheck() throws Exception {
  close();
  // Create table execute_test
  createTable("max_connection_test");
  // Insert some data into table
  insertData("max_connection_test");
  MaxJDBCConnectionCheckConstraintFactory factory = new MaxJDBCConnectionCheckConstraintFactory();
  MaxJDBCConnectionCheckConstraint constraint = factory.create(driver.getConf());
  // check constraint in driver
  assertTrue(driver.getQueryConstraints().toString().contains("MaxJDBCConnectionCheckConstraint"));
  String query;
  QueryContext context = createQueryContext("SELECT * FROM max_connection_test");
  // Saturate the driver with as many async queries as the pool allows.
  for (int i = 1; i <= JDBC_POOL_MAX_SIZE.getDefaultValue(); i++) {
    query = "SELECT " + i + " FROM max_connection_test";
    context = createQueryContext(query);
    driver.executeAsync(context);
  }
  //pool max size is same as number of query context hold on driver
  assertEquals(driver.getQueryContextMap().size(), JDBC_POOL_MAX_SIZE.getDefaultValue());
  //new query shouldn't be allowed
  // NOTE(review): allowsLaunchOf appears to return non-null when the launch is blocked
  // and null when allowed - confirm against the constraint's contract.
  QueryContext newcontext = createQueryContext("SELECT 123 FROM max_connection_test");
  assertNotNull(constraint.allowsLaunchOf(newcontext, null));
  //close one query and launch the previous query again
  driver.closeQuery(context.getQueryHandle());
  assertNull(constraint.allowsLaunchOf(newcontext, null));
  close();
}
/**
 * Data provider for {@link #testExecuteWithPreFetch(int, boolean, int, boolean, long)}.
 *
 * @return rows of {rowsToPreFetch, isCompletelyFetched, rowsPreFetched, createTable, executeTimeout}
 */
@DataProvider
public Object[][] executeWithPreFetchDP() {
  return new Object[][] {
    // rowsToPreFetch, isCompletelyFetched, rowsPreFetched, createTable, executeTimeout (ms)
    {10, true, 10, true, 20000}, // result has 10 rows and all 10 rows are pre-fetched
    {5, false, 6, false, 8000}, // result has 10 rows and 5 are requested; one extra row is read (5+1 = 6)
    {15, true, 10, false, 8000}, // result has 10 rows and 15 rows are requested to be pre-fetched
    {10, false, 0, false, 10}, // like case 1 but executeTimeout is too small, so pre-fetch is skipped
  };
}
/**
 * Verifies pre-fetching of in-memory results when server-side persistence is enabled.
 * (The original javadoc opened with garbled text "Testjdbcdri".)
 *
 * @param rowsToPreFecth : requested number of rows to be pre-fetched
 * @param isComplteleyFetched : whether the wrapped in-memory result has been completely accessed due to pre-fetch
 * @param rowsPreFetched : actual rows pre-fetched
 * @param createTable : whether to create a table before the test case is run
 * @param executeTimeoutMillis : if the query does not finish within this time, pre-fetch is ignored
 * @throws Exception
 */
@Test(dataProvider = "executeWithPreFetchDP")
public void testExecuteWithPreFetch(int rowsToPreFecth, boolean isComplteleyFetched, int rowsPreFetched,
  boolean createTable, long executeTimeoutMillis) throws Exception {
  // The table is created only once (first data-provider row) and reused afterwards.
  if (createTable) {
    createTable("execute_prefetch_test");
    insertData("execute_prefetch_test");
  }
  // Query
  final String query = "SELECT * FROM execute_prefetch_test";
  Configuration conf = new Configuration(baseConf);
  conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, true);
  conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
  conf.setBoolean(LensConfConstants.PREFETCH_INMEMORY_RESULTSET, true);
  conf.setInt(LensConfConstants.PREFETCH_INMEMORY_RESULTSET_ROWS, rowsToPreFecth);
  QueryContext context = createQueryContext(query, conf);
  context.setExecuteTimeoutMillis(executeTimeoutMillis);
  driver.executeAsync(context);
  // Poll until the driver reports a terminal state.
  while (!context.getDriverStatus().isFinished()) {
    Thread.sleep(1000);
  }
  LensResultSet resultSet = driver.fetchResultSet(context);
  assertNotNull(resultSet);
  //Check Type
  if (executeTimeoutMillis > 1000) { //enough time to execute the query
    assertTrue(resultSet instanceof PartiallyFetchedInMemoryResultSet);
  } else {
    assertFalse(resultSet instanceof PartiallyFetchedInMemoryResultSet);
    return; // No need to check further in this case
  }
  PartiallyFetchedInMemoryResultSet prs = (PartiallyFetchedInMemoryResultSet) resultSet;
  assertEquals(prs.isComplteleyFetched(), isComplteleyFetched);
  //Check Streaming flow
  if (isComplteleyFetched) {
    assertTrue(prs.isComplteleyFetched());
    prs.getPreFetchedRows(); //This will be called while streaming
    // When fully pre-fetched, the result size is known and equals the pre-fetched rows.
    assertEquals(prs.size().intValue(), rowsPreFetched);
  } else {
    assertFalse(prs.isComplteleyFetched());
    assertEquals(prs.getPreFetchedRows().size(), rowsPreFetched);
  }
  assertEquals(prs.getMetadata().getColumns().size(), 1);
  assertEquals(prs.getMetadata().getColumns().get(0).getName(), "ID");
  // Check Persistence flow: iterating must still yield all 10 rows in order.
  int rowCount = 0;
  while (prs.hasNext()) {
    ResultRow row = prs.next();
    assertEquals(row.getValues().get(0), rowCount);
    rowCount++;
  }
  assertEquals(rowCount, 10);
  prs.setFullyAccessed(true);
  //Check Purge
  assertEquals(prs.canBePurged(), true);
}
/**
 * A semantically invalid query (unknown column) must surface the JDBC SQLException as a
 * Lens SEMANTIC_ERROR (code 4001) during validate/prepare.
 *
 * @throws Exception the exception
 */
@Test
public void testJdbcSqlException() throws Exception {
  final String query = "SELECT invalid_column FROM execute_test";
  try {
    PreparedQueryContext pContext = new PreparedQueryContext(query, "SA", baseConf, drivers);
    driver.validate(pContext);
    driver.prepare(pContext);
    // Bug fix: the test previously passed silently when no exception was raised.
    fail("Expected validate/prepare to fail for an invalid column");
  } catch (LensException e) {
    assertEquals(e.getErrorInfo().getErrorCode(), 4001);
    assertEquals(e.getErrorInfo().getErrorName(), "SEMANTIC_ERROR");
    assertTrue(e.getMessage().contains("user lacks privilege or object not found: EXECUTE_TEST"));
  }
}
/**
 * Test type casting of char, varchar, nvarchar and decimal type.
 *
 * <p>decimal columns must be reported as "double" and char/varchar/nvarchar as "string".
 * NOTE(review): the method name is missing a 't' ("tesDecimalCharCasting"); TestNG runs it
 * via the annotation, so the typo is cosmetic, but consider renaming.
 *
 * @throws Exception
 *           the exception
 */
@Test
public void tesDecimalCharCasting() throws Exception {
  Statement stmt = null;
  Connection conn = null;
  try {
    conn = driver.getConnection();
    stmt = conn.createStatement();
    // Create table with char, varchar, nvarchar and decimal data type
    stmt.execute("CREATE TABLE test_casting(c1 decimal(10,2), c2 varchar(20), c3 nvarchar(20), c4 char(10))");
    // Insert data
    stmt.execute("INSERT INTO test_casting VALUES(34.56,'abc','def','ghi')");
    stmt.execute("INSERT INTO test_casting VALUES(78.50,'abc1','def1','ghi1')");
    stmt.execute("INSERT INTO test_casting VALUES(48.89,'abc2','def2','ghi2')");
    conn.commit();
    // Query
    final String query = "SELECT * FROM test_casting";
    QueryContext context = createQueryContext(query);
    LensResultSet resultSet = driver.execute(context);
    assertNotNull(resultSet);
    if (resultSet instanceof InMemoryResultSet) {
      InMemoryResultSet rs = (InMemoryResultSet) resultSet;
      LensResultSetMetadata rsMeta = rs.getMetadata();
      assertEquals(rsMeta.getColumns().size(), 4);
      // decimal(10,2) maps to "double"
      ColumnDescriptor col1 = rsMeta.getColumns().get(0);
      assertEquals(col1.getTypeName().toLowerCase(), "double");
      assertEquals(col1.getName(), "C1");
      // varchar, nvarchar and char all map to "string"
      ColumnDescriptor col2 = rsMeta.getColumns().get(1);
      assertEquals(col2.getTypeName().toLowerCase(), "string");
      assertEquals(col2.getName(), "C2");
      ColumnDescriptor col3 = rsMeta.getColumns().get(2);
      assertEquals(col3.getTypeName().toLowerCase(), "string");
      assertEquals(col3.getName(), "C3");
      ColumnDescriptor col4 = rsMeta.getColumns().get(3);
      assertEquals(col4.getTypeName().toLowerCase(), "string");
      assertEquals(col4.getName(), "C4");
      // Drain the rows to confirm they are readable.
      while (rs.hasNext()) {
        ResultRow row = rs.next();
        List<Object> rowObjects = row.getValues();
      }
      if (rs instanceof JDBCResultSet) {
        ((JDBCResultSet) rs).close();
      }
    }
  } finally {
    // Close statement before connection, skipping whichever failed to open.
    if (stmt != null) {
      stmt.close();
    }
    if (conn != null) {
      conn.close();
    }
  }
}
/**
 * Validate and prepare must both succeed for a query over an existing table.
 *
 * <p>The test config uses different DBs for the estimate pool and the query pool, so the
 * fixture table is created and populated in both.
 *
 * @throws Exception the exception
 */
@Test
public void testPrepare() throws Exception {
  final String table = "prepare_test";
  createTable(table);
  createTable(table, driver.getEstimateConnection());
  insertData(table);
  insertData(table, driver.getEstimateConnection());
  PreparedQueryContext pContext =
    new PreparedQueryContext("SELECT * from " + table, "SA", baseConf, drivers);
  //run validate
  driver.validate(pContext);
  //run prepare
  driver.prepare(pContext);
}
/**
 * Prepare against a non-existent table must fail, surfacing the HSQLDB
 * "object not found" message through the LensException cause chain.
 *
 * @throws Exception the exception
 */
@Test
public void testPrepareFailing() throws Exception {
  // Table prepare_test2 is intentionally never created.
  final String badQuery = "SELECT * FROM prepare_test2";
  try {
    driver.prepare(new PreparedQueryContext(badQuery, "SA", baseConf, drivers));
    Assert.fail("Running prepare on a non existing table.");
  } catch (LensException ex) {
    Assert.assertEquals(LensUtil.getCauseMessage(ex), "user lacks privilege or object not found: PREPARE_TEST2");
  }
}
/**
 * Validate and prepare succeed when JDBC validation warnings are skipped via the
 * JDBC_VALIDATE_SKIP_WARNINGS flag on the driver conf.
 *
 * @throws Exception the exception
 */
@Test
public void testPrepareSkipWarnings() throws Exception {
  final String table = "prepare_test3";
  // Table goes into both the query DB and the estimate DB (separate pools in test config).
  createTable(table);
  createTable(table, driver.getEstimateConnection());
  insertData(table);
  insertData(table, driver.getEstimateConnection());
  PreparedQueryContext pContext =
    new PreparedQueryContext("SELECT * from " + table, "SA", baseConf, drivers);
  pContext.getDriverConf(driver).setBoolean(JDBC_VALIDATE_SKIP_WARNINGS, true);
  //run validate
  driver.validate(pContext);
  //run prepare
  driver.prepare(pContext);
}
/**
 * Test execute async.
 *
 * <p>Submits a query asynchronously, polls status until success, verifies the completion
 * listener fired, and checks the in-memory result set contents.
 *
 * @throws Exception the exception
 */
@Test
public void testExecuteAsync() throws Exception {
  createTable("execute_async_test");
  insertData("execute_async_test");
  final String query = "SELECT * FROM execute_async_test";
  QueryContext context = createQueryContext(query);
  System.out.println("@@@ Test_execute_async:" + context.getQueryHandle());
  final CountDownLatch listenerNotificationLatch = new CountDownLatch(1);
  QueryCompletionListener listener = new QueryCompletionListener() {
    @Override
    public void onError(QueryHandle handle, String error) {
      fail("Query failed " + handle + " message" + error);
    }

    @Override
    public void onCompletion(QueryHandle handle) {
      System.out.println("@@@@ Query is complete " + handle);
      listenerNotificationLatch.countDown();
    }
  };
  executeAsync(context);
  QueryHandle handle = context.getQueryHandle();
  driver.registerForCompletionNotification(context, 0, listener);
  // Poll until the driver reports a terminal state.
  while (true) {
    driver.updateStatus(context);
    System.out.println("Query: " + handle + " Status: " + context.getDriverStatus());
    if (context.getDriverStatus().isFinished()) {
      assertEquals(context.getDriverStatus().getState(), DriverQueryState.SUCCESSFUL);
      assertEquals(context.getDriverStatus().getProgress(), 1.0);
      break;
    }
    Thread.sleep(500);
  }
  assertTrue(context.getDriverStatus().getDriverStartTime() > 0);
  assertTrue(context.getDriverStatus().getDriverFinishTime() > 0);
  // Bug fix: await()'s boolean result was previously ignored (and a log statement sat
  // unreachable after fail()), so a missed completion notification went undetected.
  assertTrue(listenerNotificationLatch.await(1, TimeUnit.SECONDS),
    "Query completion listener was not notified");
  LensResultSet grs = driver.fetchResultSet(context);
  // Check multiple fetchResultSet return same object
  for (int i = 0; i < 5; i++) {
    assertTrue(grs == driver.fetchResultSet(context));
  }
  assertNotNull(grs);
  if (grs instanceof InMemoryResultSet) {
    InMemoryResultSet rs = (InMemoryResultSet) grs;
    LensResultSetMetadata rsMeta = rs.getMetadata();
    assertEquals(rsMeta.getColumns().size(), 1);
    ColumnDescriptor col1 = rsMeta.getColumns().get(0);
    assertEquals(col1.getTypeName().toLowerCase(), "int");
    assertEquals(col1.getName(), "ID");
    System.out.println("Matched metadata");
    while (rs.hasNext()) {
      List<Object> vals = rs.next().getValues();
      assertEquals(vals.size(), 1);
      assertEquals(vals.get(0).getClass(), Integer.class);
    }
    driver.closeQuery(handle);
  } else {
    fail("Only in memory result set is supported as of now");
  }
}
/**
 * Test connection close for failed queries.
 *
 * <p>Submits as many failing queries (table invalid_conn_close2 is never created) as the
 * pool's max size; if failed queries leaked connections, the subsequent valid query could
 * not obtain one from the pool.
 *
 * @throws Exception the exception
 */
@Test
public void testConnectionCloseForFailedQueries() throws Exception {
  createTable("invalid_conn_close");
  insertData("invalid_conn_close");
  // Deliberately targets a table that does not exist.
  final String query = "SELECT * from invalid_conn_close2";
  QueryContext ctx = new QueryContext(query, "SA", new LensConf(), baseConf, drivers);
  for (int i = 0; i < JDBC_POOL_MAX_SIZE.getDefaultValue(); i++) {
    executeAsync(ctx);
    driver.updateStatus(ctx);
    System.out.println("@@@@ QUERY " + (i + 1));
  }
  String validQuery = "SELECT * FROM invalid_conn_close";
  QueryContext validCtx = createQueryContext(validQuery);
  System.out.println("@@@ Submitting valid query");
  executeAsync(validCtx);
  // Wait for query to finish
  while (true) {
    driver.updateStatus(validCtx);
    if (validCtx.getDriverStatus().isFinished()) {
      break;
    }
    Thread.sleep(1000);
  }
  driver.closeQuery(validCtx.getQueryHandle());
}
/** Submits {@code ctx} asynchronously via the driver under test (single choke point for tests). */
private void executeAsync(QueryContext ctx) throws LensException {
  driver.executeAsync(ctx);
}
/**
 * Test connection close for successful queries.
 *
 * <p>Runs as many synchronous queries as the pool's max size; if successful result sets
 * leaked connections, the final query could not obtain one from the pool.
 *
 * @throws Exception the exception
 */
@Test
public void testConnectionCloseForSuccessfulQueries() throws Exception {
  createTable("valid_conn_close");
  insertData("valid_conn_close");
  final String query = "SELECT * from valid_conn_close";
  QueryContext ctx = createQueryContext(query);
  for (int i = 0; i < ConnectionPoolProperties.JDBC_POOL_MAX_SIZE.getDefaultValue(); i++) {
    LensResultSet resultSet = driver.execute(ctx);
    assertNotNull(resultSet);
    if (resultSet instanceof InMemoryResultSet) {
      InMemoryResultSet rs = (InMemoryResultSet) resultSet;
      LensResultSetMetadata rsMeta = rs.getMetadata();
      assertEquals(rsMeta.getColumns().size(), 1);
      ColumnDescriptor col1 = rsMeta.getColumns().get(0);
      assertEquals(col1.getTypeName().toLowerCase(), "int");
      assertEquals(col1.getName(), "ID");
      // Drain the result set; previously rows were stored in an unused local.
      while (rs.hasNext()) {
        assertNotNull(rs.next().getValues());
      }
    }
    System.out.println("@@@@ QUERY " + (i + 1));
  }
  String validQuery = "SELECT * FROM valid_conn_close";
  QueryContext validCtx = createQueryContext(validQuery);
  System.out.println("@@@ Submitting query after pool quota used");
  driver.execute(validCtx);
}
/**
 * UDF registered with HSQLDB (see {@code setupCancelQuery}) to make queries slow enough
 * to cancel. Sleeps for {@code t} seconds and returns {@code t}.
 *
 * @param t seconds to sleep
 * @return t, so the function can appear in a SELECT list
 */
public static int sleep(int t) {
  try {
    log.info("Sleeping for {} seconds", t);
    // Bug fix: TimeUnit avoids the int overflow that "t * 1000" hits for large t.
    TimeUnit.SECONDS.sleep(t);
  } catch (InterruptedException ie) {
    // Cancellation may interrupt this thread; restore the flag instead of swallowing it
    // so the engine can still observe the interruption.
    Thread.currentThread().interrupt();
  }
  return t;
}
/**
 * Data provider for {@code testCancelQuery}: whether to wait before cancelling.
 *
 * <p>NOTE(review): the method name "mediaTypeData" looks like a copy/paste leftover from
 * another test; the provider is referenced by its name attribute "waitBeforeCancel", so
 * the method name is not load-bearing - consider renaming.
 *
 * @return one row per waitBeforeCancel value
 */
@DataProvider(name = "waitBeforeCancel")
public Object[][] mediaTypeData() {
  return new Object[][] {
    {true},
    {false},
  };
}
// Guards one-time setup of the cancel-query fixtures (table, data, sleep UDF).
boolean setupCancel = false;

/**
 * Lazily creates the cancel_query_test table (one row) and registers a SQL function
 * "sleep" backed by {@code TestJdbcDriver.sleep(int)} so queries can be made slow
 * enough to cancel.
 */
private void setupCancelQuery() throws Exception {
  if (!setupCancel) {
    createTable("cancel_query_test");
    insertData("cancel_query_test", null, 1);
    final String function = "create function sleep(t int) returns int no sql language java PARAMETER STYLE JAVA"
      + " EXTERNAL NAME 'CLASSPATH:org.apache.lens.driver.jdbc.TestJdbcDriver.sleep'";
    runTestSetupQuery(null, function);
    setupCancel = true;
  }
}
/**
 * Test cancel query.
 *
 * <p>Runs a query over the cancel fixture (see {@code setupCancelQuery}) and cancels it,
 * optionally waiting first so the query has actually been launched.
 *
 * @param waitBeforeCancel whether to sleep 1s before cancelling
 * @throws Exception the exception
 */
@Test(dataProvider = "waitBeforeCancel")
public void testCancelQuery(boolean waitBeforeCancel) throws Exception {
  setupCancelQuery();
  // picked function as positive with udf mapping to sleep - so that the signatures of both are the same.
  // Here we need a UDF mapping because the function sleep is not available in Hive functions and semantic analysis
  // would fail otherwise.
  final String query = "SELECT positive(5) FROM cancel_query_test";
  QueryContext context = createQueryContext(query);
  System.out.println("@@@ test_cancel:" + context.getQueryHandle());
  executeAsync(context);
  QueryHandle handle = context.getQueryHandle();
  // without wait query may not be launched.
  if (waitBeforeCancel) {
    Thread.sleep(1000);
  }
  boolean isCancelled = driver.cancelQuery(handle);
  driver.updateStatus(context);
  assertTrue(isCancelled);
  assertEquals(context.getDriverStatus().getState(), DriverQueryState.CANCELED);
  // A cancelled query must still record both start and finish timestamps.
  assertTrue(context.getDriverStatus().getDriverStartTime() > 0);
  assertTrue(context.getDriverStatus().getDriverFinishTime() > 0);
  driver.closeQuery(handle);
}
/**
 * Test invalid query.
 *
 * <p>Both sync and async execution of a query on a missing table must fail; the error
 * listener must fire, and fetching results afterwards must throw.
 *
 * @throws Exception the exception
 */
@Test
public void testInvalidQuery() throws Exception {
  final String query = "SELECT * FROM invalid_table";
  QueryContext ctx = new QueryContext(query, "SA", new LensConf(), baseConf, drivers);
  // Synchronous execution must throw.
  try {
    driver.execute(ctx); // fixed: result was previously assigned to an unused local
    fail("Should have thrown exception");
  } catch (LensException e) {
    log.error("Encountered Lens exception.", e);
  }
  final CountDownLatch listenerNotificationLatch = new CountDownLatch(1);
  QueryCompletionListener listener = new QueryCompletionListener() {
    @Override
    public void onError(QueryHandle handle, String error) {
      listenerNotificationLatch.countDown();
    }

    @Override
    public void onCompletion(QueryHandle handle) {
      fail("Was expecting this query to fail " + handle);
    }
  };
  executeAsync(ctx);
  QueryHandle handle = ctx.getQueryHandle();
  driver.registerForCompletionNotification(ctx, 0, listener);
  while (!ctx.getDriverStatus().isFinished()) {
    driver.updateStatus(ctx);
    System.out.println("Query: " + handle + " Status: " + ctx.getDriverStatus());
    Thread.sleep(500);
  }
  assertEquals(ctx.getDriverStatus().getState(), DriverQueryState.FAILED);
  assertEquals(ctx.getDriverStatus().getProgress(), 1.0);
  assertTrue(ctx.getDriverStatus().getDriverStartTime() > 0);
  assertTrue(ctx.getDriverStatus().getDriverFinishTime() > 0);
  // Bug fix: await()'s boolean result was previously ignored, so a missed onError
  // notification went undetected.
  assertTrue(listenerNotificationLatch.await(1, TimeUnit.SECONDS),
    "Error listener was not notified");
  // fetch result should throw error
  try {
    driver.fetchResultSet(ctx);
    fail("should have thrown error");
  } catch (LensException e) {
    log.error("Encountered Lens exception", e);
  }
  driver.closeQuery(handle);
}
/**
 * The estimate-pool configuration must be a distinct Configuration instance carrying the
 * estimate-specific overrides from the test config.
 */
@Test
public void testEstimateConf() {
  final Configuration conf = driver.getEstimateConnectionConf();
  assertNotNull(conf);
  // Must not be the driver's own conf object.
  assertTrue(conf != driver.getConf());
  // Overridden values expected for the estimate pool:
  assertEquals(conf.get(JDBC_USER), "estimateUser");
  assertEquals(conf.get(JDBC_POOL_MAX_SIZE.getConfigKey()), "50");
  assertEquals(conf.get(JDBC_POOL_IDLE_TIME.getConfigKey()), "800");
  assertEquals(conf.get(JDBC_GET_CONNECTION_TIMEOUT.getConfigKey()), "25000");
  assertEquals(conf.get(JDBC_MAX_STATEMENTS_PER_CONNECTION.getConfigKey()), "15");
}
/**
 * The estimate pool must be a completely separate c3p0 pool from the query pool, with the
 * overridden pool settings and extra data-source properties applied.
 */
@Test
public void testEstimateConnectionPool() throws Exception {
  assertNotNull(driver.getEstimateConnectionProvider());
  assertTrue(driver.getEstimateConnectionProvider() != driver.getConnectionProvider());
  ConnectionProvider connectionProvider = driver.getEstimateConnectionProvider();
  assertTrue(connectionProvider instanceof DataSourceConnectionProvider);
  DataSourceConnectionProvider estimateCp = (DataSourceConnectionProvider) connectionProvider;
  DataSourceConnectionProvider queryCp = (DataSourceConnectionProvider) driver.getConnectionProvider();
  // Distinct provider instances back the two pools.
  assertTrue(estimateCp != queryCp);
  DataSourceConnectionProvider.DriverConfig estimateCfg =
    estimateCp.getDriverConfigfromConf(driver.getEstimateConnectionConf());
  DataSourceConnectionProvider.DriverConfig queryCfg =
    queryCp.getDriverConfigfromConf(driver.getConf());
  log.info("@@@ ESTIMATE_CFG {}", estimateCfg);
  log.info("@@@ QUERY CFG {}", queryCfg);
  // Get connection from each so that pools get initialized
  try {
    Connection estimateConn = estimateCp.getConnection(driver.getEstimateConnectionConf());
    estimateConn.close();
  } catch (SQLException e) {
    // Ignore exception: only the pool-initialization side effect matters here.
    log.error("Error getting connection from estimate pool", e);
  }
  try {
    Connection queryConn = queryCp.getConnection(driver.getConf());
    queryConn.close();
  } catch (SQLException e) {
    log.error("Error getting connection from query pool", e);
  }
  ComboPooledDataSource estimatePool = estimateCp.getDataSource(driver.getEstimateConnectionConf());
  ComboPooledDataSource queryPool = queryCp.getDataSource(driver.getConf());
  assertTrue(estimatePool != queryPool);
  // Validate config on estimatePool (values overridden in the estimate conf).
  assertEquals(estimatePool.getMaxPoolSize(), 50);
  assertEquals(estimatePool.getMaxIdleTime(), 800);
  assertEquals(estimatePool.getCheckoutTimeout(), 25000);
  assertEquals(estimatePool.getMaxStatementsPerConnection(), 15);
  assertEquals(estimatePool.getProperties().get("random_key"), "random_value");
}
}
|
googleapis/google-cloud-java | 35,995 | java-contact-center-insights/proto-google-cloud-contact-center-insights-v1/src/main/java/com/google/cloud/contactcenterinsights/v1/ListAnalysesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contactcenterinsights/v1/contact_center_insights.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contactcenterinsights.v1;
/**
*
*
* <pre>
* The request to list analyses.
* </pre>
*
* Protobuf type {@code google.cloud.contactcenterinsights.v1.ListAnalysesRequest}
*/
public final class ListAnalysesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
ListAnalysesRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use ListAnalysesRequest.newBuilder() to construct.
// Generated code: instances are created only through the Builder.
private ListAnalysesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default instance: all string fields start as the empty string.
private ListAnalysesRequest() {
  parent_ = "";
  pageToken_ = "";
  filter_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListAnalysesRequest();
}

// Message descriptor, resolved from the generated proto file descriptor class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
      .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
      .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.class,
          com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;

// Holds either a java.lang.String or a ByteString; decoded lazily and cached by the getters.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The parent resource of the analyses.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent calls skip UTF-8 decoding.
    parent_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent resource of the analyses.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;

// Proto3 int32; 0 is the default and is omitted from the wire format (see writeTo).
private int pageSize_ = 0;

/**
 *
 *
 * <pre>
 * The maximum number of analyses to return in the response. If this
 * value is zero, the service will select a default size. A call might return
 * fewer objects than requested. A non-empty `next_page_token` in the response
 * indicates that more data is available.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

// Holds either a java.lang.String or a ByteString; decoded lazily and cached by the getters.
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";

/**
 *
 *
 * <pre>
 * The value returned by the last `ListAnalysesResponse`; indicates
 * that this is a continuation of a prior `ListAnalyses` call and
 * the system should return the next page of data.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent calls skip UTF-8 decoding.
    pageToken_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * The value returned by the last `ListAnalysesResponse`; indicates
 * that this is a continuation of a prior `ListAnalyses` call and
 * the system should return the next page of data.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FILTER_FIELD_NUMBER = 4;

// Holds either a java.lang.String or a ByteString; decoded lazily and cached by the getters.
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";

/**
 *
 *
 * <pre>
 * A filter to reduce results to a specific subset. Useful for querying
 * conversations with specific properties.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The filter.
 */
@java.lang.Override
public java.lang.String getFilter() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent calls skip UTF-8 decoding.
    filter_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * A filter to reduce results to a specific subset. Useful for querying
 * conversations with specific properties.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The bytes for filter.
 */
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This proto3 message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message in field-number order; proto3 default values
  // (empty strings, zero ints) are skipped entirely on the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size, mirroring writeTo's skip-default logic, and
  // memoizes the result (-1 means "not yet computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality including unknown fields; non-ListAnalysesRequest
  // arguments fall back to the superclass comparison.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest other =
        (com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor, each field (tagged by field number) and unknown
  // fields; memoized with 0 as the "not yet computed" sentinel.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads; all delegate
  // to the shared PARSER (byte sources) or to the GeneratedMessageV3 stream
  // helpers (InputStream/CodedInputStream sources).
  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: builders derived from the default instance,
  // from an existing prototype, or (internally) attached to a parent for
  // change notification.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; anything else is
    // copied via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request to list analyses.
   * </pre>
   *
   * Protobuf type {@code google.cloud.contactcenterinsights.v1.ListAnalysesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
          .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
          .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.class,
              com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.Builder.class);
    }

    // Construct using com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
          .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest build() {
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest buildPartial() {
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest result =
          new com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose bitField0_ bits are set into the result,
    // leaving untouched fields at their proto3 defaults.
    private void buildPartial0(
        com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest) {
        return mergeFrom((com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: only fields set to non-default values in
    // `other` overwrite this builder's fields.
    public Builder mergeFrom(com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest other) {
      if (other
          == com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: dispatches on each wire tag; tag 0 or an
    // end-group tag terminates the loop, unrecognized tags are preserved
    // as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent resource of the analyses.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource of the analyses.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource of the analyses.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource of the analyses.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource of the analyses.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of analyses to return in the response. If this
     * value is zero, the service will select a default size. A call might return
     * fewer objects than requested. A non-empty `next_page_token` in the response
     * indicates that more data is available.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of analyses to return in the response. If this
     * value is zero, the service will select a default size. A call might return
     * fewer objects than requested. A non-empty `next_page_token` in the response
     * indicates that more data is available.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of analyses to return in the response. If this
     * value is zero, the service will select a default size. A call might return
     * fewer objects than requested. A non-empty `next_page_token` in the response
     * indicates that more data is available.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListAnalysesResponse`; indicates
     * that this is a continuation of a prior `ListAnalyses` call and
     * the system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListAnalysesResponse`; indicates
     * that this is a continuation of a prior `ListAnalyses` call and
     * the system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListAnalysesResponse`; indicates
     * that this is a continuation of a prior `ListAnalyses` call and
     * the system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListAnalysesResponse`; indicates
     * that this is a continuation of a prior `ListAnalyses` call and
     * the system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListAnalysesResponse`; indicates
     * that this is a continuation of a prior `ListAnalyses` call and
     * the system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * A filter to reduce results to a specific subset. Useful for querying
     * conversations with specific properties.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A filter to reduce results to a specific subset. Useful for querying
     * conversations with specific properties.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A filter to reduce results to a specific subset. Useful for querying
     * conversations with specific properties.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A filter to reduce results to a specific subset. Useful for querying
     * conversations with specific properties.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A filter to reduce results to a specific subset. Useful for querying
     * conversations with specific properties.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
  private static final com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest();
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser instance; partial parses return whatever was read so far
  // attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListAnalysesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListAnalysesRequest>() {
        @java.lang.Override
        public ListAnalysesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListAnalysesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListAnalysesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop | 36,232 | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoStriped;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.protocol.BlockType;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.LoaderContext;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SaverContext;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.QuotaByStorageTypeEntryProto;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.QuotaByStorageTypeFeatureProto;
import org.apache.hadoop.hdfs.server.namenode.INodeWithAdditionalFields.PermissionStatusFormat;
import org.apache.hadoop.hdfs.server.namenode.SerialNumberManager.StringTable;
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress.Counter;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
import org.apache.hadoop.hdfs.util.EnumCounters;
import org.apache.hadoop.hdfs.util.ReadOnlyList;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hadoop.thirdparty.protobuf.ByteString;
@InterfaceAudience.Private
public final class FSImageFormatPBINode {
public static final int ACL_ENTRY_NAME_MASK = (1 << 24) - 1;
public static final int ACL_ENTRY_NAME_OFFSET = 6;
public static final int ACL_ENTRY_TYPE_OFFSET = 3;
public static final int ACL_ENTRY_SCOPE_OFFSET = 5;
public static final int ACL_ENTRY_PERM_MASK = 7;
public static final int XATTR_NAMESPACE_MASK = 3;
public static final int XATTR_NAMESPACE_OFFSET = 30;
public static final int XATTR_NAME_MASK = (1 << 24) - 1;
public static final int XATTR_NAME_OFFSET = 6;
/* See the comments in fsimage.proto for an explanation of the following. */
public static final int XATTR_NAMESPACE_EXT_OFFSET = 5;
public static final int XATTR_NAMESPACE_EXT_MASK = 1;
private static final Logger LOG =
LoggerFactory.getLogger(FSImageFormatPBINode.class);
private static final int DIRECTORY_ENTRY_BATCH_SIZE = 1000;
// the loader must decode all fields referencing serial number based fields
// via to<Item> methods with the string table.
public final static class Loader {
    /**
     * Decodes a {@link PermissionStatus} from its packed long fsimage form,
     * resolving the serialized user/group ids through the image's string
     * table.
     */
    public static PermissionStatus loadPermission(long id,
        final StringTable stringTable) {
      return PermissionStatusFormat.toPermissionStatus(id, stringTable);
    }
public static ImmutableList<AclEntry> loadAclEntries(
AclFeatureProto proto, final StringTable stringTable) {
ImmutableList.Builder<AclEntry> b = ImmutableList.builder();
for (int v : proto.getEntriesList()) {
b.add(AclEntryStatusFormat.toAclEntry(v, stringTable));
}
return b.build();
}
public static List<XAttr> loadXAttrs(
XAttrFeatureProto proto, final StringTable stringTable) {
List<XAttr> b = new ArrayList<>();
for (XAttrCompactProto xAttrCompactProto : proto.getXAttrsList()) {
int v = xAttrCompactProto.getName();
byte[] value = null;
if (xAttrCompactProto.getValue() != null) {
value = xAttrCompactProto.getValue().toByteArray();
}
b.add(XAttrFormat.toXAttr(v, value, stringTable));
}
return b;
}
public static ImmutableList<QuotaByStorageTypeEntry> loadQuotaByStorageTypeEntries(
QuotaByStorageTypeFeatureProto proto) {
ImmutableList.Builder<QuotaByStorageTypeEntry> b = ImmutableList.builder();
for (QuotaByStorageTypeEntryProto quotaEntry : proto.getQuotasList()) {
StorageType type = PBHelperClient.convertStorageType(quotaEntry.getStorageType());
long quota = quotaEntry.getQuota();
b.add(new QuotaByStorageTypeEntry.Builder().setStorageType(type)
.setQuota(quota).build());
}
return b.build();
}
    /**
     * Reconstructs an {@link INodeDirectory} from its fsimage protobuf
     * form, attaching the optional quota, ACL and xattr features when the
     * serialized inode carries them.
     */
    public static INodeDirectory loadINodeDirectory(INodeSection.INode n,
        LoaderContext state) {
      assert n.getType() == INodeSection.INode.Type.DIRECTORY;
      INodeSection.INodeDirectory d = n.getDirectory();
      final PermissionStatus permissions = loadPermission(d.getPermission(),
          state.getStringTable());
      final INodeDirectory dir = new INodeDirectory(n.getId(), n.getName()
          .toByteArray(), permissions, d.getModificationTime());
      final long nsQuota = d.getNsQuota(), dsQuota = d.getDsQuota();
      // Negative quotas mean "unset"; only attach the quota feature when at
      // least one of the namespace/storage-space quotas is configured.
      if (nsQuota >= 0 || dsQuota >= 0) {
        dir.addDirectoryWithQuotaFeature(new DirectoryWithQuotaFeature.Builder().
            nameSpaceQuota(nsQuota).storageSpaceQuota(dsQuota).build());
      }
      EnumCounters<StorageType> typeQuotas = null;
      if (d.hasTypeQuotas()) {
        ImmutableList<QuotaByStorageTypeEntry> qes =
            loadQuotaByStorageTypeEntries(d.getTypeQuotas());
        typeQuotas = new EnumCounters<StorageType>(StorageType.class,
            HdfsConstants.QUOTA_RESET);
        for (QuotaByStorageTypeEntry qe : qes) {
          // Ignore unset quotas and storage types that do not support them.
          if (qe.getQuota() >= 0 && qe.getStorageType() != null &&
              qe.getStorageType().supportTypeQuota()) {
            typeQuotas.set(qe.getStorageType(), qe.getQuota());
          }
        }
        if (typeQuotas.anyGreaterOrEqual(0)) {
          // Reuse the quota feature created above for ns/ds quotas if one
          // was attached; otherwise create a feature for type quotas only.
          DirectoryWithQuotaFeature q = dir.getDirectoryWithQuotaFeature();
          if (q == null) {
            dir.addDirectoryWithQuotaFeature(new DirectoryWithQuotaFeature.
                Builder().typeQuotas(typeQuotas).build());
          } else {
            q.setQuota(typeQuotas);
          }
        }
      }
      if (d.hasAcl()) {
        int[] entries = AclEntryStatusFormat.toInt(loadAclEntries(
            d.getAcl(), state.getStringTable()));
        dir.addAclFeature(new AclFeature(entries));
      }
      if (d.hasXAttrs()) {
        dir.addXAttrFeature(new XAttrFeature(
            loadXAttrs(d.getXAttrs(), state.getStringTable())));
      }
      return dir;
    }
/**
 * Registers every block of {@code file} with the block manager and
 * stores the (possibly replaced) {@code BlockInfo} back into the file,
 * establishing the file-&gt;block mapping.
 */
public static void updateBlocksMap(INodeFile file, BlockManager bm) {
  final BlockInfo[] fileBlocks = file.getBlocks();
  if (fileBlocks == null) {
    return;
  }
  for (int idx = 0; idx < fileBlocks.length; idx++) {
    file.setBlock(idx, bm.addBlockCollectionWithCheck(fileBlocks[idx], file));
  }
}
// Directory tree being populated from the image.
private final FSDirectory dir;
// Namesystem owning the directory tree and the block manager.
private final FSNamesystem fsn;
// Outer protobuf-format loader: supplies string table, ref list and
// per-section input streams.
private final FSImageFormatProtobuf.Loader parent;
// Applies blocks-map updates asynchronously. Must remain a
// single-thread executor: the blocks map is not thread safe.
private ExecutorService blocksMapUpdateExecutor;
// Applies name-cache updates asynchronously. Must remain a
// single-thread executor: the name cache is not thread safe.
private ExecutorService nameCacheUpdateExecutor;
/**
 * Creates a loader bound to the given namesystem and the outer
 * protobuf-format loader.
 */
Loader(FSNamesystem fsn, final FSImageFormatProtobuf.Loader parent) {
  this.fsn = fsn;
  this.dir = fsn.dir;
  this.parent = parent;
  // Note: these executors must be SingleThreadExecutor, as they
  // are used to modify structures which are not thread safe.
  blocksMapUpdateExecutor = Executors.newSingleThreadExecutor();
  nameCacheUpdateExecutor = Executors.newSingleThreadExecutor();
}
/**
 * Loads the INodeDirectory sub-sections concurrently on the given
 * executor, one task per sub-section. Blocks until all sub-sections are
 * done, then rethrows the first failure, if any.
 *
 * @param service executor running the per-sub-section tasks
 * @param sections the INODE_DIR_SUB sections of the image
 * @param compressionCodec codec of the image, or null if uncompressed
 * @throws IOException if any sub-section fails or the wait is interrupted
 */
void loadINodeDirectorySectionInParallel(ExecutorService service,
    ArrayList<FileSummary.Section> sections, String compressionCodec)
    throws IOException {
  LOG.info("Loading the INodeDirectory section in parallel with {} sub-" +
      "sections", sections.size());
  CountDownLatch latch = new CountDownLatch(sections.size());
  // Collects failures from worker threads; synchronized since several
  // tasks may fail concurrently.
  final List<IOException> exceptions = Collections.synchronizedList(new ArrayList<>());
  for (FileSummary.Section s : sections) {
    service.submit(() -> {
      InputStream ins = null;
      try {
        ins = parent.getInputStreamForSection(s,
            compressionCodec);
        loadINodeDirectorySection(ins);
      } catch (Exception e) {
        LOG.error("An exception occurred loading INodeDirectories in parallel", e);
        exceptions.add(new IOException(e));
      } finally {
        // Count down before closing so a close failure cannot hang the
        // waiting thread.
        latch.countDown();
        try {
          if (ins != null) {
            ins.close();
          }
        } catch (IOException ioe) {
          LOG.warn("Failed to close the input stream, ignoring", ioe);
        }
      }
    });
  }
  try {
    latch.await();
  } catch (InterruptedException e) {
    LOG.error("Interrupted waiting for countdown latch", e);
    throw new IOException(e);
  }
  if (exceptions.size() != 0) {
    LOG.error("{} exceptions occurred loading INodeDirectories",
        exceptions.size());
    // Rethrow only the first failure; the rest were already logged above.
    throw exceptions.get(0);
  }
  LOG.info("Completed loading all INodeDirectory sub-sections");
}
/**
 * Reads one INodeDirectory (sub-)section: a sequence of delimited
 * DirEntry messages, each linking a parent directory to its children
 * (plain inodes by id, references by index into the shared ref list).
 *
 * @param in section input stream; a LimitedInputStream, so parsing
 *     terminates by returning null at the section boundary
 */
void loadINodeDirectorySection(InputStream in) throws IOException {
  final List<INodeReference> refList = parent.getLoaderContext()
      .getRefList();
  while (true) {
    INodeDirectorySection.DirEntry e = INodeDirectorySection.DirEntry
        .parseDelimitedFrom(in);
    // note that in is a LimitedInputStream
    if (e == null) {
      break;
    }
    INodeDirectory p = dir.getInode(e.getParent()).asDirectory();
    for (long id : e.getChildrenList()) {
      INode child = dir.getInode(id);
      if (!addToParent(p, child)) {
        LOG.warn("Failed to add the inode {} to the directory {}",
            child.getId(), p.getId());
      }
    }
    // Reference children (e.g. from snapshots) are resolved through the
    // shared reference list by index.
    for (int refId : e.getRefChildrenList()) {
      INodeReference ref = refList.get(refId);
      if (!addToParent(p, ref)) {
        LOG.warn("Failed to add the inode reference {} to the directory {}",
            ref.getId(), p.getId());
      }
    }
  }
}
/**
 * Appends {@code inode} to the pending batch if it is a file, and
 * flushes the batch to the async cache/blocks-map updaters once it
 * reaches {@code DIRECTORY_ENTRY_BATCH_SIZE}.
 */
private void fillUpInodeList(ArrayList<INode> inodeList, INode inode) {
  // Only files are batched; other inode kinds need no cache or
  // blocks-map updates here.
  if (inode.isFile()) {
    inodeList.add(inode);
  }
  final boolean batchFull = inodeList.size() >= DIRECTORY_ENTRY_BATCH_SIZE;
  if (batchFull) {
    addToCacheAndBlockMap(inodeList);
    inodeList.clear();
  }
}
/**
 * Hands a snapshot of the given inode batch to the two single-thread
 * executors that asynchronously update the name cache and the blocks
 * map.
 *
 * @param inodeList batch of file inodes; copied here because the caller
 *     clears and reuses the list immediately after this call
 */
private void addToCacheAndBlockMap(final ArrayList<INode> inodeList) {
  final ArrayList<INode> inodes = new ArrayList<>(inodeList);
  // Lambdas instead of anonymous Runnables, consistent with the task
  // submission style used elsewhere in this class. Each executor is
  // single-threaded, so the non-thread-safe targets are only ever
  // mutated by one thread.
  nameCacheUpdateExecutor.submit(() -> addToCacheInternal(inodes));
  blocksMapUpdateExecutor.submit(() -> updateBlockMapInternal(inodes));
}
// Populates the name cache for a batch of inodes. Not thread safe:
// must only run on the single-threaded nameCacheUpdateExecutor.
private void addToCacheInternal(ArrayList<INode> inodeList) {
  for (int k = 0; k < inodeList.size(); k++) {
    dir.cacheName(inodeList.get(k));
  }
}
// Updates the blocks map for a batch of file inodes. Not thread safe:
// must only run on the single-threaded blocksMapUpdateExecutor.
private void updateBlockMapInternal(ArrayList<INode> inodeList) {
  for (int k = 0; k < inodeList.size(); k++) {
    updateBlocksMap(inodeList.get(k).asFile(), fsn.getBlockManager());
  }
}
/**
 * Shuts down both async updater executors and blocks until all queued
 * blocks-map and name-cache updates have completed.
 *
 * @throws IOException if interrupted while waiting
 */
void waitBlocksMapAndNameCacheUpdateFinished() throws IOException {
  long start = System.currentTimeMillis();
  waitExecutorTerminated(blocksMapUpdateExecutor);
  waitExecutorTerminated(nameCacheUpdateExecutor);
  LOG.info("Completed update blocks map and name cache, total waiting "
      + "duration {}ms.", (System.currentTimeMillis() - start));
}
/**
 * Initiates an orderly shutdown of the executor and waits until all
 * previously submitted tasks have finished.
 *
 * @param executorService the executor to shut down and drain
 * @throws IOException if the wait is interrupted; the thread's
 *     interrupt status is restored before throwing
 */
private void waitExecutorTerminated(ExecutorService executorService)
    throws IOException {
  executorService.shutdown();
  long start = System.currentTimeMillis();
  while (!executorService.isTerminated()) {
    try {
      // Use the result: only log when the wait timed out and we are
      // still pending (previously the return value was ignored).
      boolean done = executorService.awaitTermination(1, TimeUnit.SECONDS);
      if (!done && LOG.isDebugEnabled()) {
        LOG.debug("Waiting to executor service terminated duration {}ms.",
            (System.currentTimeMillis() - start));
      }
    } catch (InterruptedException e) {
      // Restore the interrupt status so callers further up the stack
      // can observe the interruption (it was previously swallowed
      // after wrapping).
      Thread.currentThread().interrupt();
      LOG.error("Interrupted waiting for executor terminated.", e);
      throw new IOException(e);
    }
  }
}
/**
 * Loads the whole inode section serially: header first (carries the
 * last inode id and expected count), then every inode record.
 */
void loadINodeSection(InputStream in, StartupProgress prog,
    Step currentStep) throws IOException {
  loadINodeSectionHeader(in, prog, currentStep);
  final Counter progressCounter =
      prog.getCounter(Phase.LOADING_FSIMAGE, currentStep);
  final int loaded = loadINodesInSection(in, progressCounter);
  LOG.info("Successfully loaded {} inodes", loaded);
}
/**
 * Reads inode records from one (sub-)section until the stream is
 * exhausted, adding each to the inode map and batching files for the
 * async cache/blocks-map updaters.
 *
 * <p>May run concurrently on several threads (see
 * loadINodeSectionInParallel), hence the synchronized blocks around the
 * shared-structure mutations.
 *
 * @param counter startup-progress counter, or null
 * @return the number of inodes read from this stream
 */
private int loadINodesInSection(InputStream in, Counter counter)
    throws IOException {
  // As the input stream is a LimitInputStream, the reading will stop when
  // EOF is encountered at the end of the stream.
  int cntr = 0;
  ArrayList<INode> inodeList = new ArrayList<>();
  while (true) {
    INodeSection.INode p = INodeSection.INode.parseDelimitedFrom(in);
    if (p == null) {
      break;
    }
    if (p.getId() == INodeId.ROOT_INODE_ID) {
      // Root mutates shared root-dir state; serialize across loaders.
      synchronized(this) {
        loadRootINode(p);
      }
    } else {
      INode n = loadINode(p);
      // The inode map is shared between parallel section loaders.
      synchronized(this) {
        dir.addToInodeMap(n);
      }
      fillUpInodeList(inodeList, n);
    }
    cntr++;
    if (counter != null) {
      counter.increment();
    }
  }
  // Flush the final, partially filled batch.
  if (inodeList.size() > 0){
    addToCacheAndBlockMap(inodeList);
  }
  return cntr;
}
/**
 * Reads the inode-section header, resets the namesystem's last inode
 * id from it, and registers the expected inode count with the startup
 * progress tracker.
 *
 * @return the number of inodes the section claims to contain
 */
private long loadINodeSectionHeader(InputStream in, StartupProgress prog,
    Step currentStep) throws IOException {
  INodeSection s = INodeSection.parseDelimitedFrom(in);
  fsn.dir.resetLastInodeId(s.getLastInodeId());
  long numInodes = s.getNumInodes();
  // Parameterized logging, consistent with the rest of this class
  // (was string concatenation).
  LOG.info("Loading {} INodes.", numInodes);
  prog.setTotal(Phase.LOADING_FSIMAGE, currentStep, numInodes);
  return numInodes;
}
/**
 * Loads the INode sub-sections concurrently on the given executor, one
 * task per sub-section. The first sub-section's header is parsed
 * synchronously before the tasks start. Blocks until all tasks finish,
 * rethrows the first failure, and verifies the total inode count
 * against the header.
 *
 * @throws IOException if a sub-section fails, the wait is interrupted,
 *     or the loaded count does not match the expected count
 */
void loadINodeSectionInParallel(ExecutorService service,
    ArrayList<FileSummary.Section> sections,
    String compressionCodec, StartupProgress prog,
    Step currentStep) throws IOException {
  LOG.info("Loading the INode section in parallel with {} sub-sections",
      sections.size());
  long expectedInodes = 0;
  CountDownLatch latch = new CountDownLatch(sections.size());
  AtomicInteger totalLoaded = new AtomicInteger(0);
  // Collects failures from worker threads; synchronized since several
  // tasks may fail concurrently.
  final List<IOException> exceptions = Collections.synchronizedList(new ArrayList<>());
  Counter counter = prog.getCounter(Phase.LOADING_FSIMAGE, currentStep);
  for (int i=0; i < sections.size(); i++) {
    FileSummary.Section s = sections.get(i);
    InputStream ins = parent.getInputStreamForSection(s, compressionCodec);
    if (i == 0) {
      // The first inode section has a header which must be processed first
      expectedInodes = loadINodeSectionHeader(ins, prog, currentStep);
    }
    service.submit(() -> {
      try {
        totalLoaded.addAndGet(loadINodesInSection(ins, counter));
      } catch (Exception e) {
        LOG.error("An exception occurred loading INodes in parallel", e);
        exceptions.add(new IOException(e));
      } finally {
        latch.countDown();
        try {
          ins.close();
        } catch (IOException ioe) {
          LOG.warn("Failed to close the input stream, ignoring", ioe);
        }
      }
    });
  }
  try {
    latch.await();
  } catch (InterruptedException e) {
    // Previously only logged, which let this method fall through while
    // worker tasks might still be mutating the namespace. Propagate the
    // failure like loadINodeDirectorySectionInParallel does, and restore
    // the interrupt status for callers.
    Thread.currentThread().interrupt();
    LOG.error("Interrupted waiting for countdown latch", e);
    throw new IOException(e);
  }
  if (exceptions.size() != 0) {
    LOG.error("{} exceptions occurred loading INodes", exceptions.size());
    throw exceptions.get(0);
  }
  // Cross-check the count from the header against what was actually read.
  if (totalLoaded.get() != expectedInodes) {
    throw new IOException("Expected to load "+expectedInodes+" in " +
        "parallel, but loaded "+totalLoaded.get()+". The image may " +
        "be corrupt.");
  }
  LOG.info("Completed loading all INode sections. Loaded {} inodes.",
      totalLoaded.get());
}
/**
 * Drains the files-under-construction section without acting on it.
 * Leases are already installed while the inode section is loaded; this
 * section is read only for on-disk format compatibility.
 */
void loadFilesUnderConstructionSection(InputStream in) throws IOException {
  FileUnderConstructionEntry entry;
  while ((entry = FileUnderConstructionEntry.parseDelimitedFrom(in)) != null) {
    // Entries are intentionally ignored.
  }
}
/**
 * Links {@code child} under {@code parentDir} during image loading.
 *
 * @return true if the child was attached, false if the insertion was
 *     rejected (e.g. duplicate name)
 * @throws HadoopIllegalArgumentException if a reserved name would be
 *     placed directly under the root directory
 */
private boolean addToParent(INodeDirectory parentDir, INode child) {
  if (parentDir == dir.rootDir && FSDirectory.isReservedName(child)) {
    throw new HadoopIllegalArgumentException("File name \""
        + child.getLocalName() + "\" is reserved. Please "
        + " change the name of the existing file or directory to another "
        + "name before upgrading to this release.");
  }
  // NOTE: This does not update space counts for parents
  // Return the insertion result directly instead of the previous
  // if (!x) return false; return true; form.
  return parentDir.addChildAtLoading(child);
}
/**
 * Deserializes a single inode record, dispatching on its declared type.
 *
 * @return the reconstructed inode, or null for an unrecognized type
 */
private INode loadINode(INodeSection.INode n) {
  switch (n.getType()) {
    case FILE:
      return loadINodeFile(n);
    case DIRECTORY:
      return loadINodeDirectory(n, parent.getLoaderContext());
    case SYMLINK:
      return loadINodeSymlink(n);
    default:
      return null;
  }
}
/**
 * Deserializes one file inode: its block list (contiguous or striped),
 * permissions, ACLs, xattrs, and — for open files — the
 * under-construction state, including registering the lease and
 * converting the last block to an under-construction block.
 *
 * @param n the serialized inode (must be of type FILE)
 * @return the reconstructed in-memory file inode
 */
private INodeFile loadINodeFile(INodeSection.INode n) {
  assert n.getType() == INodeSection.INode.Type.FILE;
  INodeSection.INodeFile f = n.getFile();
  List<BlockProto> bp = f.getBlocksList();
  BlockType blockType = PBHelperClient.convert(f.getBlockType());
  LoaderContext state = parent.getLoaderContext();
  // An erasure-coding policy id marks a striped file; striped files
  // carry no replication factor and vice versa.
  boolean isStriped = f.hasErasureCodingPolicyID();
  assert ((!isStriped) || (isStriped && !f.hasReplication()));
  Short replication = (!isStriped ? (short) f.getReplication() : null);
  Byte ecPolicyID = (isStriped ?
      (byte) f.getErasureCodingPolicyID() : null);
  ErasureCodingPolicy ecPolicy = isStriped ?
      fsn.getErasureCodingPolicyManager().getByID(ecPolicyID) : null;
  BlockInfo[] blocks = new BlockInfo[bp.size()];
  for (int i = 0; i < bp.size(); ++i) {
    BlockProto b = bp.get(i);
    if (isStriped) {
      Preconditions.checkState(ecPolicy.getId() > 0,
          "File with ID " + n.getId() +
          " has an invalid erasure coding policy ID " + ecPolicy.getId());
      blocks[i] = new BlockInfoStriped(PBHelperClient.convert(b), ecPolicy);
    } else {
      blocks[i] = new BlockInfoContiguous(PBHelperClient.convert(b),
          replication);
    }
  }
  final PermissionStatus permissions = loadPermission(f.getPermission(),
      parent.getLoaderContext().getStringTable());
  final INodeFile file = new INodeFile(n.getId(),
      n.getName().toByteArray(), permissions, f.getModificationTime(),
      f.getAccessTime(), blocks, replication, ecPolicyID,
      f.getPreferredBlockSize(), (byte)f.getStoragePolicyID(), blockType);
  if (f.hasAcl()) {
    int[] entries = AclEntryStatusFormat.toInt(loadAclEntries(
        f.getAcl(), state.getStringTable()));
    file.addAclFeature(new AclFeature(entries));
  }
  if (f.hasXAttrs()) {
    file.addXAttrFeature(new XAttrFeature(
        loadXAttrs(f.getXAttrs(), state.getStringTable())));
  }
  // under-construction information
  if (f.hasFileUC()) {
    INodeSection.FileUnderConstructionFeature uc = f.getFileUC();
    file.toUnderConstruction(uc.getClientName(), uc.getClientMachine());
    // update the lease manager
    fsn.leaseManager.addLease(uc.getClientName(), file.getId());
    if (blocks.length > 0) {
      BlockInfo lastBlk = file.getLastBlock();
      // replace the last block of file
      final BlockInfo ucBlk;
      if (isStriped) {
        BlockInfoStriped striped = (BlockInfoStriped) lastBlk;
        ucBlk = new BlockInfoStriped(striped, ecPolicy);
      } else {
        ucBlk = new BlockInfoContiguous(lastBlk,
            replication);
      }
      ucBlk.convertToBlockUnderConstruction(
          HdfsServerConstants.BlockUCState.UNDER_CONSTRUCTION, null);
      file.setBlock(file.numBlocks() - 1, ucBlk);
    }
  }
  return file;
}
/**
 * Deserializes one symlink inode from its protobuf form.
 */
private INodeSymlink loadINodeSymlink(INodeSection.INode n) {
  assert n.getType() == INodeSection.INode.Type.SYMLINK;
  INodeSection.INodeSymlink s = n.getSymlink();
  final PermissionStatus perm = loadPermission(s.getPermission(),
      parent.getLoaderContext().getStringTable());
  // Build and return directly; symlinks carry no extra features here.
  return new INodeSymlink(n.getId(), n.getName().toByteArray(), perm,
      s.getModificationTime(), s.getAccessTime(),
      s.getTarget().toStringUtf8());
}
/**
 * Applies the serialized root inode to the pre-existing in-memory root
 * directory. The root is never replaced; instead its quotas,
 * modification time, permissions, ACLs and xattrs are copied over.
 */
private void loadRootINode(INodeSection.INode p) {
  INodeDirectory root = loadINodeDirectory(p, parent.getLoaderContext());
  final QuotaCounts q = root.getQuotaCounts();
  final long nsQuota = q.getNameSpace();
  final long dsQuota = q.getStorageSpace();
  // -1 means "no quota set"; only apply explicitly configured quotas.
  if (nsQuota != -1 || dsQuota != -1) {
    dir.rootDir.getDirectoryWithQuotaFeature().setQuota(nsQuota, dsQuota);
  }
  final EnumCounters<StorageType> typeQuotas = q.getTypeSpaces();
  if (typeQuotas.anyGreaterOrEqual(0)) {
    dir.rootDir.getDirectoryWithQuotaFeature().setQuota(typeQuotas);
  }
  dir.rootDir.cloneModificationTime(root);
  dir.rootDir.clonePermissionStatus(root);
  final AclFeature af = root.getFeature(AclFeature.class);
  if (af != null) {
    dir.rootDir.addAclFeature(af);
  }
  // root dir supports having extended attributes according to POSIX
  final XAttrFeature f = root.getXAttrFeature();
  if (f != null) {
    dir.rootDir.addXAttrFeature(f);
  }
  // May mark the root as an encryption zone based on its xattrs.
  dir.addRootDirToEncryptionZone(f);
}
}
// The saver writes fields that reference serial numbers directly; the
// serial-number maps are compacted again when the image is loaded.
/**
 * Serializes the inode-related sections of a protobuf-format fsimage:
 * the INode section, the INodeDirectory section, and the
 * files-under-construction section.
 */
public final static class Saver {
  // Count of non-fatal inconsistencies (e.g. dangling child pointers)
  // detected while writing; exposed via getNumImageErrors().
  private long numImageErrors;
  /** Encodes an inode's permission status as its compact long form. */
  private static long buildPermissionStatus(INodeAttributes n) {
    return n.getPermissionLong();
  }
  /** Copies the int-encoded ACL entries of a feature into a proto builder. */
  private static AclFeatureProto.Builder buildAclEntries(AclFeature f) {
    AclFeatureProto.Builder b = AclFeatureProto.newBuilder();
    for (int pos = 0, e; pos < f.getEntriesSize(); pos++) {
      e = f.getEntryAt(pos);
      b.addEntries(e);
    }
    return b;
  }
  /** Serializes the xattrs of a feature in their compact proto form. */
  private static XAttrFeatureProto.Builder buildXAttrs(XAttrFeature f) {
    XAttrFeatureProto.Builder b = XAttrFeatureProto.newBuilder();
    for (XAttr a : f.getXAttrs()) {
      XAttrCompactProto.Builder xAttrCompactBuilder = XAttrCompactProto.
          newBuilder();
      // Name, namespace and flags are packed into a single int.
      int v = XAttrFormat.toInt(a);
      xAttrCompactBuilder.setName(v);
      if (a.getValue() != null) {
        xAttrCompactBuilder.setValue(PBHelperClient.getByteString(a.getValue()));
      }
      b.addXAttrs(xAttrCompactBuilder.build());
    }
    return b;
  }
  /**
   * Serializes the per-storage-type quotas of a directory; only types
   * that support quotas and have a non-negative value are emitted.
   */
  private static QuotaByStorageTypeFeatureProto.Builder
      buildQuotaByStorageTypeEntries(QuotaCounts q) {
    QuotaByStorageTypeFeatureProto.Builder b =
        QuotaByStorageTypeFeatureProto.newBuilder();
    for (StorageType t: StorageType.getTypesSupportingQuota()) {
      if (q.getTypeSpace(t) >= 0) {
        QuotaByStorageTypeEntryProto.Builder eb =
            QuotaByStorageTypeEntryProto.newBuilder().
                setStorageType(PBHelperClient.convertStorageType(t)).
                setQuota(q.getTypeSpace(t));
        b.addQuotas(eb);
      }
    }
    return b;
  }
  /**
   * Builds the proto representation of a file inode: timestamps,
   * permission, block sizing, replication or EC policy, ACLs and xattrs.
   */
  public static INodeSection.INodeFile.Builder buildINodeFile(
      INodeFileAttributes file, final SaverContext state) {
    INodeSection.INodeFile.Builder b = INodeSection.INodeFile.newBuilder()
        .setAccessTime(file.getAccessTime())
        .setModificationTime(file.getModificationTime())
        .setPermission(buildPermissionStatus(file))
        .setPreferredBlockSize(file.getPreferredBlockSize())
        .setStoragePolicyID(file.getLocalStoragePolicyID())
        .setBlockType(PBHelperClient.convert(file.getBlockType()));
    // A file is either striped (EC policy id) or replicated — never both.
    if (file.isStriped()) {
      b.setErasureCodingPolicyID(file.getErasureCodingPolicyID());
    } else {
      b.setReplication(file.getFileReplication());
    }
    AclFeature f = file.getAclFeature();
    if (f != null) {
      b.setAcl(buildAclEntries(f));
    }
    XAttrFeature xAttrFeature = file.getXAttrFeature();
    if (xAttrFeature != null) {
      b.setXAttrs(buildXAttrs(xAttrFeature));
    }
    return b;
  }
  /**
   * Builds the proto representation of a directory inode: modification
   * time, quotas (including per-storage-type), permission, ACLs, xattrs.
   */
  public static INodeSection.INodeDirectory.Builder buildINodeDirectory(
      INodeDirectoryAttributes dir, final SaverContext state) {
    QuotaCounts quota = dir.getQuotaCounts();
    INodeSection.INodeDirectory.Builder b = INodeSection.INodeDirectory
        .newBuilder().setModificationTime(dir.getModificationTime())
        .setNsQuota(quota.getNameSpace())
        .setDsQuota(quota.getStorageSpace())
        .setPermission(buildPermissionStatus(dir));
    if (quota.getTypeSpaces().anyGreaterOrEqual(0)) {
      b.setTypeQuotas(buildQuotaByStorageTypeEntries(quota));
    }
    AclFeature f = dir.getAclFeature();
    if (f != null) {
      b.setAcl(buildAclEntries(f));
    }
    XAttrFeature xAttrFeature = dir.getXAttrFeature();
    if (xAttrFeature != null) {
      b.setXAttrs(buildXAttrs(xAttrFeature));
    }
    return b;
  }
  // Namesystem whose namespace is being saved.
  private final FSNamesystem fsn;
  // Image file summary, updated as sections/sub-sections are committed.
  private final FileSummary.Builder summary;
  // Save context: cancellation checks and the source namesystem.
  private final SaveNamespaceContext context;
  // Outer protobuf-format saver: section streams and the shared ref list.
  private final FSImageFormatProtobuf.Saver parent;
  Saver(FSImageFormatProtobuf.Saver parent, FileSummary.Builder summary) {
    this.parent = parent;
    this.summary = summary;
    this.context = parent.getContext();
    this.fsn = context.getSourceNamesystem();
    this.numImageErrors = 0;
  }
  /**
   * Writes the INodeDirectory section: one DirEntry per non-empty
   * directory, listing plain children by id and reference children by
   * index into the shared reference list. Periodically checks for
   * cancellation and splits the output into sub-sections.
   */
  void serializeINodeDirectorySection(OutputStream out) throws IOException {
    FSDirectory dir = fsn.getFSDirectory();
    Iterator<INodeWithAdditionalFields> iter = dir.getINodeMap()
        .getMapIterator();
    final ArrayList<INodeReference> refList = parent.getSaverContext()
        .getRefList();
    int i = 0;
    int outputInodes = 0;
    while (iter.hasNext()) {
      INodeWithAdditionalFields n = iter.next();
      if (!n.isDirectory()) {
        continue;
      }
      ReadOnlyList<INode> children = n.asDirectory().getChildrenList(
          Snapshot.CURRENT_STATE_ID);
      if (children.size() > 0) {
        INodeDirectorySection.DirEntry.Builder b = INodeDirectorySection.
            DirEntry.newBuilder().setParent(n.getId());
        for (INode inode : children) {
          // Error if the child inode doesn't exist in inodeMap
          if (dir.getInode(inode.getId()) == null) {
            FSImage.LOG.error(
                "FSImageFormatPBINode#serializeINodeDirectorySection: " +
                    "Dangling child pointer found. Missing INode in " +
                    "inodeMap: id=" + inode.getId() +
                    "; path=" + inode.getFullPathName() +
                    "; parent=" + (inode.getParent() == null ? "null" :
                    inode.getParent().getFullPathName()));
            ++numImageErrors;
          }
          if (!inode.isReference()) {
            // Serialization must ensure that children are in order, related
            // to HDFS-13693
            b.addChildren(inode.getId());
          } else {
            // References are recorded by their position in the ref list.
            refList.add(inode.asReference());
            b.addRefChildren(refList.size() - 1);
          }
          outputInodes++;
        }
        INodeDirectorySection.DirEntry e = b.build();
        e.writeDelimitedTo(out);
      }
      ++i;
      if (i % FSImageFormatProtobuf.Saver.CHECK_CANCEL_INTERVAL == 0) {
        context.checkCancelled();
      }
      // Roll over to a new sub-section once enough inodes were emitted;
      // the stream must be re-fetched after a commit.
      if (outputInodes >= parent.getInodesPerSubSection()) {
        outputInodes = 0;
        parent.commitSubSection(summary,
            FSImageFormatProtobuf.SectionName.INODE_DIR_SUB);
        out = parent.getSectionOutputStream();
      }
    }
    parent.commitSectionAndSubSection(summary,
        FSImageFormatProtobuf.SectionName.INODE_DIR,
        FSImageFormatProtobuf.SectionName.INODE_DIR_SUB);
  }
  /**
   * Writes the INode section: a header (last inode id, inode count)
   * followed by every inode in the inode map, split into sub-sections.
   */
  void serializeINodeSection(OutputStream out) throws IOException {
    INodeMap inodesMap = fsn.dir.getINodeMap();
    INodeSection.Builder b = INodeSection.newBuilder()
        .setLastInodeId(fsn.dir.getLastInodeId()).setNumInodes(inodesMap.size());
    INodeSection s = b.build();
    s.writeDelimitedTo(out);
    int i = 0;
    Iterator<INodeWithAdditionalFields> iter = inodesMap.getMapIterator();
    while (iter.hasNext()) {
      INodeWithAdditionalFields n = iter.next();
      save(out, n);
      ++i;
      if (i % FSImageFormatProtobuf.Saver.CHECK_CANCEL_INTERVAL == 0) {
        context.checkCancelled();
      }
      if (i % parent.getInodesPerSubSection() == 0) {
        parent.commitSubSection(summary,
            FSImageFormatProtobuf.SectionName.INODE_SUB);
        out = parent.getSectionOutputStream();
      }
    }
    parent.commitSectionAndSubSection(summary,
        FSImageFormatProtobuf.SectionName.INODE,
        FSImageFormatProtobuf.SectionName.INODE_SUB);
  }
  /**
   * Writes the files-under-construction section: one entry per leased
   * file; inodes that vanished or are no longer open are skipped with a
   * warning.
   */
  void serializeFilesUCSection(OutputStream out) throws IOException {
    Collection<Long> filesWithUC = fsn.getLeaseManager()
        .getINodeIdWithLeases();
    for (Long id : filesWithUC) {
      INode inode = fsn.getFSDirectory().getInode(id);
      if (inode == null) {
        LOG.warn("Fail to find inode " + id + " when saving the leases.");
        continue;
      }
      INodeFile file = inode.asFile();
      if (!file.isUnderConstruction()) {
        LOG.warn("Fail to save the lease for inode id " + id
            + " as the file is not under construction");
        continue;
      }
      String path = file.getFullPathName();
      FileUnderConstructionEntry.Builder b = FileUnderConstructionEntry
          .newBuilder().setInodeId(file.getId()).setFullPath(path);
      FileUnderConstructionEntry e = b.build();
      e.writeDelimitedTo(out);
    }
    parent.commitSection(summary,
        FSImageFormatProtobuf.SectionName.FILES_UNDERCONSTRUCTION);
  }
  /** Dispatches to the type-specific save overload. */
  private void save(OutputStream out, INode n) throws IOException {
    if (n.isDirectory()) {
      save(out, n.asDirectory());
    } else if (n.isFile()) {
      save(out, n.asFile());
    } else if (n.isSymlink()) {
      save(out, n.asSymlink());
    }
  }
  /** Writes a single directory inode record. */
  private void save(OutputStream out, INodeDirectory n) throws IOException {
    INodeSection.INodeDirectory.Builder b = buildINodeDirectory(n,
        parent.getSaverContext());
    INodeSection.INode r = buildINodeCommon(n)
        .setType(INodeSection.INode.Type.DIRECTORY).setDirectory(b).build();
    r.writeDelimitedTo(out);
  }
  /** Writes a single file inode record, including blocks and UC state. */
  private void save(OutputStream out, INodeFile n) throws IOException {
    INodeSection.INodeFile.Builder b = buildINodeFile(n,
        parent.getSaverContext());
    BlockInfo[] blocks = n.getBlocks();
    if (blocks != null) {
      for (Block block : n.getBlocks()) {
        b.addBlocks(PBHelperClient.convert(block));
      }
    }
    FileUnderConstructionFeature uc = n.getFileUnderConstructionFeature();
    if (uc != null) {
      INodeSection.FileUnderConstructionFeature f =
          INodeSection.FileUnderConstructionFeature
              .newBuilder().setClientName(uc.getClientName())
              .setClientMachine(uc.getClientMachine()).build();
      b.setFileUC(f);
    }
    INodeSection.INode r = buildINodeCommon(n)
        .setType(INodeSection.INode.Type.FILE).setFile(b).build();
    r.writeDelimitedTo(out);
  }
  /** Writes a single symlink inode record. */
  private void save(OutputStream out, INodeSymlink n) throws IOException {
    INodeSection.INodeSymlink.Builder b = INodeSection.INodeSymlink
        .newBuilder()
        .setPermission(buildPermissionStatus(n))
        .setTarget(ByteString.copyFrom(n.getSymlink()))
        .setModificationTime(n.getModificationTime())
        .setAccessTime(n.getAccessTime());
    INodeSection.INode r = buildINodeCommon(n)
        .setType(INodeSection.INode.Type.SYMLINK).setSymlink(b).build();
    r.writeDelimitedTo(out);
  }
  /** Builds the fields common to every inode type: id and local name. */
  private INodeSection.INode.Builder buildINodeCommon(INode n) {
    return INodeSection.INode.newBuilder()
        .setId(n.getId())
        .setName(ByteString.copyFrom(n.getLocalNameBytes()));
  }
  /**
   * Number of non-fatal errors detected while writing the
   * INodeSection and INodeDirectorySection sections.
   * @return the number of non-fatal errors detected.
   */
  public long getNumImageErrors() {
    return numImageErrors;
  }
}
// Static holder for Loader/Saver; private constructor prevents
// instantiation.
private FSImageFormatPBINode() {
}
}
|
googleapis/google-cloud-java | 36,057 | java-iam-admin/proto-google-iam-admin-v1/src/main/java/com/google/iam/admin/v1/ListServiceAccountsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/admin/v1/iam.proto
// Protobuf Java Version: 3.25.8
package com.google.iam.admin.v1;
/**
*
*
* <pre>
* The service account list response.
* </pre>
*
* Protobuf type {@code google.iam.admin.v1.ListServiceAccountsResponse}
*/
public final class ListServiceAccountsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.iam.admin.v1.ListServiceAccountsResponse)
ListServiceAccountsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListServiceAccountsResponse.newBuilder() to construct.
private ListServiceAccountsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListServiceAccountsResponse() {
accounts_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListServiceAccountsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.iam.admin.v1.Iam
.internal_static_google_iam_admin_v1_ListServiceAccountsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.iam.admin.v1.Iam
.internal_static_google_iam_admin_v1_ListServiceAccountsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.iam.admin.v1.ListServiceAccountsResponse.class,
com.google.iam.admin.v1.ListServiceAccountsResponse.Builder.class);
}
public static final int ACCOUNTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.iam.admin.v1.ServiceAccount> accounts_;
/**
*
*
* <pre>
* The list of matching service accounts.
* </pre>
*
* <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.iam.admin.v1.ServiceAccount> getAccountsList() {
return accounts_;
}
/**
*
*
* <pre>
* The list of matching service accounts.
* </pre>
*
* <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.iam.admin.v1.ServiceAccountOrBuilder>
getAccountsOrBuilderList() {
return accounts_;
}
/**
*
*
* <pre>
* The list of matching service accounts.
* </pre>
*
* <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
*/
@java.lang.Override
public int getAccountsCount() {
return accounts_.size();
}
/**
*
*
* <pre>
* The list of matching service accounts.
* </pre>
*
* <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
*/
@java.lang.Override
public com.google.iam.admin.v1.ServiceAccount getAccounts(int index) {
return accounts_.get(index);
}
/**
*
*
* <pre>
* The list of matching service accounts.
* </pre>
*
* <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
*/
@java.lang.Override
public com.google.iam.admin.v1.ServiceAccountOrBuilder getAccountsOrBuilder(int index) {
return accounts_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* To retrieve the next page of results, set
* [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token]
* to this value.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* To retrieve the next page of results, set
* [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token]
* to this value.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < accounts_.size(); i++) {
output.writeMessage(1, accounts_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < accounts_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, accounts_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.iam.admin.v1.ListServiceAccountsResponse)) {
return super.equals(obj);
}
com.google.iam.admin.v1.ListServiceAccountsResponse other =
(com.google.iam.admin.v1.ListServiceAccountsResponse) obj;
if (!getAccountsList().equals(other.getAccountsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAccountsCount() > 0) {
hash = (37 * hash) + ACCOUNTS_FIELD_NUMBER;
hash = (53 * hash) + getAccountsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
  // Generated static parsing entry points. Every overload delegates to PARSER,
  // differing only in the input representation accepted.

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream variants surface stream failures as java.io.IOException via the
  // parseWithIOException helper.
  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants delegate to GeneratedMessageV3.parseDelimitedWithIOException.
  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.iam.admin.v1.ListServiceAccountsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a new builder seeded from the singleton default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a new builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.iam.admin.v1.ListServiceAccountsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; any other instance is
    // copied into the new Builder via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The service account list response.
   * </pre>
   *
   * Protobuf type {@code google.iam.admin.v1.ListServiceAccountsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.iam.admin.v1.ListServiceAccountsResponse)
      com.google.iam.admin.v1.ListServiceAccountsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.iam.admin.v1.Iam
          .internal_static_google_iam_admin_v1_ListServiceAccountsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.iam.admin.v1.Iam
          .internal_static_google_iam_admin_v1_ListServiceAccountsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.iam.admin.v1.ListServiceAccountsResponse.class,
              com.google.iam.admin.v1.ListServiceAccountsResponse.Builder.class);
    }

    // Construct using com.google.iam.admin.v1.ListServiceAccountsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      // Resets field values and the has-bits; in this message, bit 0x1 tracks the
      // accounts list and bit 0x2 tracks next_page_token.
      super.clear();
      bitField0_ = 0;
      if (accountsBuilder_ == null) {
        accounts_ = java.util.Collections.emptyList();
      } else {
        accounts_ = null;
        accountsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.iam.admin.v1.Iam
          .internal_static_google_iam_admin_v1_ListServiceAccountsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.iam.admin.v1.ListServiceAccountsResponse getDefaultInstanceForType() {
      return com.google.iam.admin.v1.ListServiceAccountsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.iam.admin.v1.ListServiceAccountsResponse build() {
      // Like buildPartial(), but rejects an uninitialized result.
      com.google.iam.admin.v1.ListServiceAccountsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.iam.admin.v1.ListServiceAccountsResponse buildPartial() {
      com.google.iam.admin.v1.ListServiceAccountsResponse result =
          new com.google.iam.admin.v1.ListServiceAccountsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.iam.admin.v1.ListServiceAccountsResponse result) {
      // Hands ownership of the accounts list to the built message. Once the list is
      // wrapped unmodifiable, bit 0x1 is cleared so a later mutation re-copies it.
      if (accountsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          accounts_ = java.util.Collections.unmodifiableList(accounts_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.accounts_ = accounts_;
      } else {
        result.accounts_ = accountsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.iam.admin.v1.ListServiceAccountsResponse result) {
      // Copies scalar fields whose has-bit is set (0x2 = next_page_token).
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    // Reflection-based field accessors: straight delegations to the superclass.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to reflective merge.
      if (other instanceof com.google.iam.admin.v1.ListServiceAccountsResponse) {
        return mergeFrom((com.google.iam.admin.v1.ListServiceAccountsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.iam.admin.v1.ListServiceAccountsResponse other) {
      if (other == com.google.iam.admin.v1.ListServiceAccountsResponse.getDefaultInstance())
        return this;
      if (accountsBuilder_ == null) {
        // Plain-list mode: adopt other's list wholesale when ours is empty,
        // otherwise append after making our list mutable.
        if (!other.accounts_.isEmpty()) {
          if (accounts_.isEmpty()) {
            accounts_ = other.accounts_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAccountsIsMutable();
            accounts_.addAll(other.accounts_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: if our builder is empty, discard it and rebase on
        // other's list (recreating the builder only when forced to).
        if (!other.accounts_.isEmpty()) {
          if (accountsBuilder_.isEmpty()) {
            accountsBuilder_.dispose();
            accountsBuilder_ = null;
            accounts_ = other.accounts_;
            bitField0_ = (bitField0_ & ~0x00000001);
            accountsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAccountsFieldBuilder()
                    : null;
          } else {
            accountsBuilder_.addAllMessages(other.accounts_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // This message declares no required fields, so every state is initialized.
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (accounts): length-delimited ServiceAccount message.
                com.google.iam.admin.v1.ServiceAccount m =
                    input.readMessage(
                        com.google.iam.admin.v1.ServiceAccount.parser(), extensionRegistry);
                if (accountsBuilder_ == null) {
                  ensureAccountsIsMutable();
                  accounts_.add(m);
                } else {
                  accountsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Field 2 (next_page_token): UTF-8 validated string.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Whatever was consumed before a failure stays merged into this builder.
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    // The repeated `accounts` field lives either in accounts_ (plain-list mode) or
    // in accountsBuilder_ (field-builder mode); only one is active at a time.
    private java.util.List<com.google.iam.admin.v1.ServiceAccount> accounts_ =
        java.util.Collections.emptyList();

    private void ensureAccountsIsMutable() {
      // Copy-on-first-write: bit 0x1 records that accounts_ is a private ArrayList.
      if (!((bitField0_ & 0x00000001) != 0)) {
        accounts_ = new java.util.ArrayList<com.google.iam.admin.v1.ServiceAccount>(accounts_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.iam.admin.v1.ServiceAccount,
            com.google.iam.admin.v1.ServiceAccount.Builder,
            com.google.iam.admin.v1.ServiceAccountOrBuilder>
        accountsBuilder_;
    // --- Accessors for repeated field `accounts`. Each accessor branches on which
    // --- storage mode is active: the plain list or the RepeatedFieldBuilderV3.

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public java.util.List<com.google.iam.admin.v1.ServiceAccount> getAccountsList() {
      if (accountsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(accounts_);
      } else {
        return accountsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public int getAccountsCount() {
      if (accountsBuilder_ == null) {
        return accounts_.size();
      } else {
        return accountsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public com.google.iam.admin.v1.ServiceAccount getAccounts(int index) {
      if (accountsBuilder_ == null) {
        return accounts_.get(index);
      } else {
        return accountsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder setAccounts(int index, com.google.iam.admin.v1.ServiceAccount value) {
      if (accountsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAccountsIsMutable();
        accounts_.set(index, value);
        onChanged();
      } else {
        accountsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder setAccounts(
        int index, com.google.iam.admin.v1.ServiceAccount.Builder builderForValue) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.set(index, builderForValue.build());
        onChanged();
      } else {
        accountsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder addAccounts(com.google.iam.admin.v1.ServiceAccount value) {
      if (accountsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAccountsIsMutable();
        accounts_.add(value);
        onChanged();
      } else {
        accountsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder addAccounts(int index, com.google.iam.admin.v1.ServiceAccount value) {
      if (accountsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAccountsIsMutable();
        accounts_.add(index, value);
        onChanged();
      } else {
        accountsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder addAccounts(com.google.iam.admin.v1.ServiceAccount.Builder builderForValue) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.add(builderForValue.build());
        onChanged();
      } else {
        accountsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder addAccounts(
        int index, com.google.iam.admin.v1.ServiceAccount.Builder builderForValue) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.add(index, builderForValue.build());
        onChanged();
      } else {
        accountsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder addAllAccounts(
        java.lang.Iterable<? extends com.google.iam.admin.v1.ServiceAccount> values) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, accounts_);
        onChanged();
      } else {
        accountsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder clearAccounts() {
      // In plain-list mode the has-bit (0x1) is dropped along with the contents.
      if (accountsBuilder_ == null) {
        accounts_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        accountsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public Builder removeAccounts(int index) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.remove(index);
        onChanged();
      } else {
        accountsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public com.google.iam.admin.v1.ServiceAccount.Builder getAccountsBuilder(int index) {
      // Forces field-builder mode (see getAccountsFieldBuilder()).
      return getAccountsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public com.google.iam.admin.v1.ServiceAccountOrBuilder getAccountsOrBuilder(int index) {
      if (accountsBuilder_ == null) {
        return accounts_.get(index);
      } else {
        return accountsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public java.util.List<? extends com.google.iam.admin.v1.ServiceAccountOrBuilder>
        getAccountsOrBuilderList() {
      if (accountsBuilder_ != null) {
        return accountsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(accounts_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public com.google.iam.admin.v1.ServiceAccount.Builder addAccountsBuilder() {
      return getAccountsFieldBuilder()
          .addBuilder(com.google.iam.admin.v1.ServiceAccount.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public com.google.iam.admin.v1.ServiceAccount.Builder addAccountsBuilder(int index) {
      return getAccountsFieldBuilder()
          .addBuilder(index, com.google.iam.admin.v1.ServiceAccount.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of matching service accounts.
     * </pre>
     *
     * <code>repeated .google.iam.admin.v1.ServiceAccount accounts = 1;</code>
     */
    public java.util.List<com.google.iam.admin.v1.ServiceAccount.Builder> getAccountsBuilderList() {
      return getAccountsFieldBuilder().getBuilderList();
    }

    // Lazily switches from plain-list mode to field-builder mode: the builder
    // takes over accounts_ (and its mutability bit), and accounts_ is nulled.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.iam.admin.v1.ServiceAccount,
            com.google.iam.admin.v1.ServiceAccount.Builder,
            com.google.iam.admin.v1.ServiceAccountOrBuilder>
        getAccountsFieldBuilder() {
      if (accountsBuilder_ == null) {
        accountsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.iam.admin.v1.ServiceAccount,
                com.google.iam.admin.v1.ServiceAccount.Builder,
                com.google.iam.admin.v1.ServiceAccountOrBuilder>(
                accounts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        accounts_ = null;
      }
      return accountsBuilder_;
    }
    // next_page_token is stored as Object so it can hold either a String or a
    // lazily-decoded ByteString; the accessors below cache conversions in place.
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * To retrieve the next page of results, set
     * [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token]
     * to this value.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the ByteString once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * To retrieve the next page of results, set
     * [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token]
     * to this value.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode the String once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * To retrieve the next page of results, set
     * [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token]
     * to this value.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * To retrieve the next page of results, set
     * [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token]
     * to this value.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * To retrieve the next page of results, set
     * [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token]
     * to this value.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.iam.admin.v1.ListServiceAccountsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.iam.admin.v1.ListServiceAccountsResponse)
  private static final com.google.iam.admin.v1.ListServiceAccountsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.iam.admin.v1.ListServiceAccountsResponse();
  }

  /** Returns the shared immutable default (all-fields-unset) instance. */
  public static com.google.iam.admin.v1.ListServiceAccountsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListServiceAccountsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListServiceAccountsResponse>() {
        @java.lang.Override
        public ListServiceAccountsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed before the failure so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListServiceAccountsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListServiceAccountsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.iam.admin.v1.ListServiceAccountsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/struts | 35,463 | core/src/test/java/org/apache/struts2/views/jsp/IteratorTagTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.views.jsp;
import jakarta.servlet.jsp.JspException;
import jakarta.servlet.jsp.tagext.TagSupport;
import org.apache.commons.collections.ListUtils;
import org.springframework.mock.web.MockBodyContent;
import org.springframework.mock.web.MockJspWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* Test Case for Iterator Tag
*/
public class IteratorTagTest extends AbstractUITagTest {
    // Tag instance under test; presumably initialized per-test in setUp() — the
    // initialization is outside the visible portion of this file.
    private IteratorTag tag;
public void testIteratingWithIdSpecified() throws Exception {
List<String> list = new ArrayList<>();
list.add("one");
list.add("two");
list.add("three");
list.add("four");
list.add("five");
Foo foo = new Foo();
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setVar("myId");
// one
int result = tag.doStartTag();
assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
assertEquals(stack.peek(), "one");
assertEquals(stack.getContext().get("myId"), "one");
tag.doInitBody();
// two
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "two");
assertEquals(stack.getContext().get("myId"), "two");
// three
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "three");
assertEquals(stack.getContext().get("myId"), "three");
// four
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "four");
assertEquals(stack.getContext().get("myId"), "four");
// five
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "five");
assertEquals(stack.getContext().get("myId"), "five");
result = tag.doAfterBody();
assertEquals(TagSupport.SKIP_BODY, result);
result = tag.doEndTag();
assertEquals(TagSupport.EVAL_PAGE, result);
// Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
IteratorTag freshTag = new IteratorTag();
freshTag.setPageContext(pageContext);
assertFalse("Tag state after doEndTag() under default tag clear state is equal to new Tag with pageContext/parent set. " +
"May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
strutsBodyTagsAreReflectionEqual(tag, freshTag));
}
public void testIteratingWithIdSpecified_clearTagStateSet() throws Exception {
List<String> list = new ArrayList<>();
list.add("one");
list.add("two");
list.add("three");
list.add("four");
list.add("five");
Foo foo = new Foo();
foo.setList(list);
stack.push(foo);
tag.setPerformClearTagStateForTagPoolingServers(true); // Explicitly request tag state clearing.
tag.setValue("list");
tag.setVar("myId");
// one
int result = tag.doStartTag();
setComponentTagClearTagState(tag, true); // Ensure component tag state clearing is set true (to match tag).
assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
assertEquals(stack.peek(), "one");
assertEquals(stack.getContext().get("myId"), "one");
tag.doInitBody();
// two
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "two");
assertEquals(stack.getContext().get("myId"), "two");
// three
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "three");
assertEquals(stack.getContext().get("myId"), "three");
// four
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "four");
assertEquals(stack.getContext().get("myId"), "four");
// five
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "five");
assertEquals(stack.getContext().get("myId"), "five");
result = tag.doAfterBody();
assertEquals(TagSupport.SKIP_BODY, result);
result = tag.doEndTag();
assertEquals(TagSupport.EVAL_PAGE, result);
// Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
IteratorTag freshTag = new IteratorTag();
freshTag.setPerformClearTagStateForTagPoolingServers(true);
freshTag.setPageContext(pageContext);
assertTrue("Tag state after doEndTag() and explicit tag state clearing is inequal to new Tag with pageContext/parent set. " +
"May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
strutsBodyTagsAreReflectionEqual(tag, freshTag));
}
public void testIteratingWithIdSpecifiedAndNullElementOnCollection() throws Exception {
List<String> list = new ArrayList<>();
list.add("one");
list.add(null);
list.add("three");
Foo foo = new Foo();
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setVar("myId");
// one
int result = tag.doStartTag();
assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
assertEquals(stack.peek(), "one");
assertEquals(stack.getContext().get("myId"), "one");
tag.doInitBody();
// two
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertNull(stack.peek());
assertNull(stack.getContext().get("myId"));
// three
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "three");
assertEquals(stack.getContext().get("myId"), "three");
result = tag.doAfterBody();
assertEquals(TagSupport.SKIP_BODY, result);
result = tag.doEndTag();
assertEquals(TagSupport.EVAL_PAGE, result);
// Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
IteratorTag freshTag = new IteratorTag();
freshTag.setPageContext(pageContext);
assertFalse("Tag state after doEndTag() under default tag clear state is equal to new Tag with pageContext/parent set. " +
"May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
strutsBodyTagsAreReflectionEqual(tag, freshTag));
}
public void testIteratingWithIdSpecifiedAndNullElementOnCollection_clearTagStateSet() throws Exception {
List<String> list = new ArrayList<>();
list.add("one");
list.add(null);
list.add("three");
Foo foo = new Foo();
foo.setList(list);
stack.push(foo);
tag.setPerformClearTagStateForTagPoolingServers(true); // Explicitly request tag state clearing.
tag.setValue("list");
tag.setVar("myId");
// one
int result = tag.doStartTag();
setComponentTagClearTagState(tag, true); // Ensure component tag state clearing is set true (to match tag).
assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
assertEquals(stack.peek(), "one");
assertEquals(stack.getContext().get("myId"), "one");
tag.doInitBody();
// two
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertNull(stack.peek());
assertNull(stack.getContext().get("myId"));
// three
result = tag.doAfterBody();
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(stack.peek(), "three");
assertEquals(stack.getContext().get("myId"), "three");
result = tag.doAfterBody();
assertEquals(TagSupport.SKIP_BODY, result);
result = tag.doEndTag();
assertEquals(TagSupport.EVAL_PAGE, result);
// Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
IteratorTag freshTag = new IteratorTag();
freshTag.setPerformClearTagStateForTagPoolingServers(true);
freshTag.setPageContext(pageContext);
assertTrue("Tag state after doEndTag() and explicit tag state clearing is inequal to new Tag with pageContext/parent set. " +
"May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
strutsBodyTagsAreReflectionEqual(tag, freshTag));
}
public void testArrayIterator() {
Foo foo = new Foo();
foo.setArray(new String[]{"test1", "test2", "test3"});
stack.push(foo);
tag.setValue("array");
iterateThreeStrings();
}
public void testCollectionIterator() {
Foo foo = new Foo();
List<String> list = new ArrayList<>();
list.add("test1");
list.add("test2");
list.add("test3");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
iterateThreeStrings();
}
public void testIteratorWithDefaultValue() {
stack.push(new String[]{"test1", "test2", "test3"});
iterateThreeStrings();
}
public void testMapIterator() {
Foo foo = new Foo();
HashMap<String, String> map = new HashMap<>();
map.put("test1", "123");
map.put("test2", "456");
map.put("test3", "789");
foo.setMap(map);
stack.push(foo);
tag.setValue("map");
int result = 0;
try {
result = tag.doStartTag();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
assertEquals(4, stack.size());
assertTrue(stack.getRoot().peek() instanceof Map.Entry);
try {
result = tag.doAfterBody();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(4, stack.size());
assertTrue(stack.getRoot().peek() instanceof Map.Entry);
try {
result = tag.doAfterBody();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals(4, stack.size());
assertTrue(stack.getRoot().peek() instanceof Map.Entry);
try {
result = tag.doAfterBody();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.SKIP_BODY, result);
assertEquals(3, stack.size());
try {
result = tag.doEndTag();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_PAGE, result);
// Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
IteratorTag freshTag = new IteratorTag();
freshTag.setPageContext(pageContext);
assertFalse("Tag state after doEndTag() under default tag clear state is equal to new Tag with pageContext/parent set. " +
"May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
strutsBodyTagsAreReflectionEqual(tag, freshTag));
}
    /**
     * Iterates a 3-entry map with explicit tag-state clearing enabled, then
     * verifies the used tag compares reflection-equal to a pristine
     * clearing-enabled tag, i.e. clearTagStateForTagPoolingServers() ran.
     */
    public void testMapIterator_clearTagStateSet() {
        Foo foo = new Foo();
        HashMap<String, String> map = new HashMap<>();
        map.put("test1", "123");
        map.put("test2", "456");
        map.put("test3", "789");
        foo.setMap(map);
        stack.push(foo);
        tag.setPerformClearTagStateForTagPoolingServers(true); // Explicitly request tag state clearing.
        tag.setValue("map");
        int result = 0;
        try {
            result = tag.doStartTag();
            setComponentTagClearTagState(tag, true); // Ensure component tag state clearing is set true (to match tag).
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
        assertEquals(4, stack.size());
        // Map iteration pushes Map.Entry values; HashMap order is unspecified,
        // so only the element type is asserted, not a particular key.
        assertTrue(stack.getRoot().peek() instanceof Map.Entry);
        try {
            result = tag.doAfterBody();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertEquals(4, stack.size());
        assertTrue(stack.getRoot().peek() instanceof Map.Entry);
        try {
            result = tag.doAfterBody();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertEquals(4, stack.size());
        assertTrue(stack.getRoot().peek() instanceof Map.Entry);
        try {
            result = tag.doAfterBody();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        // Third doAfterBody() exhausts the 3 entries and pops the iterator value.
        assertEquals(TagSupport.SKIP_BODY, result);
        assertEquals(3, stack.size());
        try {
            result = tag.doEndTag();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_PAGE, result);
        // Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
        IteratorTag freshTag = new IteratorTag();
        freshTag.setPerformClearTagStateForTagPoolingServers(true);
        freshTag.setPageContext(pageContext);
        assertTrue("Tag state after doEndTag() and explicit tag state clearing is inequal to new Tag with pageContext/parent set. " +
                "May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
                strutsBodyTagsAreReflectionEqual(tag, freshTag));
    }
    /**
     * Verifies the IteratorStatus exposed under the "status" attribute while
     * iterating a 3-element array: index (0-based), count (1-based),
     * first/last and odd/even must track each iteration.
     */
    public void testStatus() {
        Foo foo = new Foo();
        foo.setArray(new String[]{"test1", "test2", "test3"});
        stack.push(foo);
        tag.setValue("array");
        tag.setStatus("fooStatus");
        int result = 0;
        try {
            result = tag.doStartTag();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
        assertEquals("test1", stack.getRoot().peek());
        assertEquals(4, stack.size());
        // First pass: index 0 / count 1, first=true, odd (1st item).
        IteratorStatus status = (IteratorStatus) context.get("fooStatus");
        assertNotNull(status);
        assertFalse(status.isLast());
        assertTrue(status.isFirst());
        assertEquals(0, status.getIndex());
        assertEquals("0", status.getIndexStr());
        assertEquals(1, status.getCount());
        assertEquals("1", status.getCountStr());
        assertTrue(status.isOdd());
        assertFalse(status.isEven());
        try {
            result = tag.doAfterBody();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertEquals("test2", stack.getRoot().peek());
        assertEquals(4, stack.size());
        // Second pass: neither first nor last, even (2nd item).
        status = (IteratorStatus) context.get("fooStatus");
        assertNotNull(status);
        assertFalse(status.isLast());
        assertFalse(status.isFirst());
        assertEquals(1, status.getIndex());
        assertEquals(2, status.getCount());
        assertFalse(status.isOdd());
        assertTrue(status.isEven());
        try {
            result = tag.doAfterBody();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertEquals("test3", stack.getRoot().peek());
        assertEquals(4, stack.size());
        // Third pass: last=true, odd again (3rd item).
        status = (IteratorStatus) context.get("fooStatus");
        assertNotNull(status);
        assertTrue(status.isLast());
        assertFalse(status.isFirst());
        assertEquals(2, status.getIndex());
        assertEquals(3, status.getCount());
        assertTrue(status.isOdd());
        assertFalse(status.isEven());
        try {
            result = tag.doEndTag();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_PAGE, result);
        // Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
        IteratorTag freshTag = new IteratorTag();
        freshTag.setPageContext(pageContext);
        assertFalse("Tag state after doEndTag() under default tag clear state is equal to new Tag with pageContext/parent set. " +
                "May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
                strutsBodyTagsAreReflectionEqual(tag, freshTag));
    }
    /**
     * Same status tracking as testStatus(), but with explicit tag state
     * clearing enabled; afterwards the used tag must compare
     * reflection-equal to a pristine clearing-enabled tag.
     */
    public void testStatus_clearTagStateSet() {
        Foo foo = new Foo();
        foo.setArray(new String[]{"test1", "test2", "test3"});
        stack.push(foo);
        tag.setPerformClearTagStateForTagPoolingServers(true); // Explicitly request tag state clearing.
        tag.setValue("array");
        tag.setStatus("fooStatus");
        int result = 0;
        try {
            result = tag.doStartTag();
            setComponentTagClearTagState(tag, true); // Ensure component tag state clearing is set true (to match tag).
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
        assertEquals("test1", stack.getRoot().peek());
        assertEquals(4, stack.size());
        // First pass: index 0 / count 1, first=true, odd.
        IteratorStatus status = (IteratorStatus) context.get("fooStatus");
        assertNotNull(status);
        assertFalse(status.isLast());
        assertTrue(status.isFirst());
        assertEquals(0, status.getIndex());
        assertEquals(1, status.getCount());
        assertTrue(status.isOdd());
        assertFalse(status.isEven());
        try {
            result = tag.doAfterBody();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertEquals("test2", stack.getRoot().peek());
        assertEquals(4, stack.size());
        // Second pass: neither first nor last, even.
        status = (IteratorStatus) context.get("fooStatus");
        assertNotNull(status);
        assertFalse(status.isLast());
        assertFalse(status.isFirst());
        assertEquals(1, status.getIndex());
        assertEquals(2, status.getCount());
        assertFalse(status.isOdd());
        assertTrue(status.isEven());
        try {
            result = tag.doAfterBody();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertEquals("test3", stack.getRoot().peek());
        assertEquals(4, stack.size());
        // Third pass: last=true, odd.
        status = (IteratorStatus) context.get("fooStatus");
        assertNotNull(status);
        assertTrue(status.isLast());
        assertFalse(status.isFirst());
        assertEquals(2, status.getIndex());
        assertEquals(3, status.getCount());
        assertTrue(status.isOdd());
        assertFalse(status.isEven());
        try {
            result = tag.doEndTag();
        } catch (JspException e) {
            fail(e.getMessage());
        }
        assertEquals(TagSupport.EVAL_PAGE, result);
        // Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
        IteratorTag freshTag = new IteratorTag();
        freshTag.setPerformClearTagStateForTagPoolingServers(true);
        freshTag.setPageContext(pageContext);
        assertTrue("Tag state after doEndTag() and explicit tag state clearing is inequal to new Tag with pageContext/parent set. " +
                "May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
                strutsBodyTagsAreReflectionEqual(tag, freshTag));
    }
public void testEmptyArray() {
Foo foo = new Foo();
foo.setArray(new String[]{});
stack.push(foo);
tag.setValue("array");
validateSkipBody();
}
public void testNullArray() {
Foo foo = new Foo();
foo.setArray(null);
stack.push(foo);
tag.setValue("array");
validateSkipBody();
}
public void testEmptyCollection() {
Foo foo = new Foo();
foo.setList(new ArrayList<>());
stack.push(foo);
tag.setValue("list");
validateSkipBody();
}
public void testNullCollection() {
Foo foo = new Foo();
foo.setList(null);
stack.push(foo);
tag.setValue("list");
validateSkipBody();
}
public void testCounter() throws JspException {
tag.setBegin("0");
tag.setEnd("5");
validateCounter(new Integer[]{0, 1, 2, 3, 4, 5});
}
    /**
     * A pure begin/end counter must expose plain western-digit strings for
     * the counter values even when the action locale is non-default.
     */
    public void testCounterWithDifferentLocale() throws JspException {
        // NOTE(review): new Locale("fa_IR") passes the whole string as the language
        // code; the conventional form is new Locale("fa", "IR") - confirm intent.
        stack.getActionContext().withLocale(new Locale("fa_IR"));
        tag.setVar("it");
        tag.setBegin("0");
        tag.setEnd("5");
        List<String> expectedValues = Arrays.asList("0", "1", "2", "3", "4", "5");
        ArrayList<String> values = new ArrayList<>();
        try {
            int result = tag.doStartTag();
            assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
            // First value is read through the "it" var installed via setVar().
            values.add((String) stack.findValue("it", String.class));
        } catch (JspException e) {
            fail(e.getMessage());
        }
        while (tag.doAfterBody() == TagSupport.EVAL_BODY_AGAIN) {
            // Subsequent values are read from the top of the value stack.
            values.add((String) stack.findValue("top", String.class));
        }
        assertEquals(expectedValues.size(), values.size());
        assertEquals(expectedValues, values);
    }
public void testCounterWithStackValues() throws JspException {
stack.getContext().put("begin", 0);
stack.getContext().put("end", 5);
tag.setBegin("begin");
tag.setEnd("end");
validateCounter(new Integer[]{0, 1, 2, 3, 4, 5});
}
public void testCounterWithList() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setBegin("0");
tag.setEnd("2");
validateCounter(new String[]{"a", "b", "c"});
}
    /**
     * An array of three null slots still drives three body evaluations; each
     * pass exposes null both on the stack top and through the "anId" var.
     */
    public void testNullElements() throws JspException {
        Foo foo = new Foo();
        foo.setArray(new String[3]); // three null entries
        stack.push(foo);
        tag.setValue("array");
        tag.setVar("anId");
        // one
        int result = tag.doStartTag();
        assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
        assertNull(stack.peek());
        assertNull(stack.getContext().get("anId"));
        tag.doInitBody();
        // two
        result = tag.doAfterBody();
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertNull(stack.peek());
        assertNull(stack.getContext().get("anId"));
        // three
        result = tag.doAfterBody();
        assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
        assertNull(stack.peek());
        assertNull(stack.getContext().get("anId"));
        // exhausted: body is skipped, then the tag completes the page normally
        result = tag.doAfterBody();
        assertEquals(TagSupport.SKIP_BODY, result);
        result = tag.doEndTag();
        assertEquals(TagSupport.EVAL_PAGE, result);
    }
public void testCounterWithArray() throws JspException {
Foo foo = new Foo();
foo.setArray(new String[]{"a", "b", "c", "d"});
stack.push(foo);
tag.setValue("array");
tag.setBegin("0");
tag.setEnd("2");
validateCounter(new String[]{"a", "b", "c"});
}
public void testCounterWithListNoEnd() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setBegin("0");
validateCounter(new String[]{"a", "b", "c", "d"});
}
public void testCounterWithArrayNoEnd() throws JspException {
Foo foo = new Foo();
foo.setArray(new String[]{"a", "b", "c", "d"});
stack.push(foo);
tag.setValue("array");
tag.setBegin("0");
validateCounter(new String[]{"a", "b", "c", "d"});
}
public void testCounterWithList2() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setBegin("1");
tag.setEnd("2");
validateCounter(new String[]{"b", "c"});
}
public void testCounterWithArray2() throws JspException {
Foo foo = new Foo();
foo.setArray(new String[]{"a", "b", "c", "d"});
stack.push(foo);
tag.setValue("array");
tag.setBegin("1");
tag.setEnd("2");
validateCounter(new String[]{"b", "c"});
}
public void testCounterWithListNoEnd2() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setBegin("2");
validateCounter(new String[]{"c", "d"});
}
public void testCounterWithArrayNoEnd2() throws JspException {
Foo foo = new Foo();
foo.setArray(new String[]{"a", "b", "c", "d"});
stack.push(foo);
tag.setValue("array");
tag.setBegin("2");
validateCounter(new String[]{"c", "d"});
}
public void testCounter2() throws JspException {
tag.setBegin("2");
tag.setEnd("5");
validateCounter(new Integer[]{2, 3, 4, 5});
}
public void testCounterWithStep() throws JspException {
tag.setBegin("0");
tag.setEnd("5");
tag.setStep("2");
validateCounter(new Integer[]{0, 2, 4});
}
public void testCounterWithListAndStep() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setStep("2");
tag.setBegin("0");
tag.setEnd("3");
validateCounter(new String[]{"a", "c"});
}
public void testCounterWithArrayAndStep() throws JspException {
Foo foo = new Foo();
foo.setArray(new String[]{"a", "b", "c", "d"});
stack.push(foo);
tag.setValue("array");
tag.setStep("2");
tag.setBegin("0");
tag.setEnd("3");
validateCounter(new String[]{"a", "c"});
}
public void testCounterWithListAndStepNoEnd() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setStep("2");
tag.setBegin("0");
validateCounter(new String[]{"a", "c"});
}
public void testCounterWithArrayAndStepNoEnd() throws JspException {
Foo foo = new Foo();
foo.setArray(new String[]{"a", "b", "c", "d"});
stack.push(foo);
tag.setValue("array");
tag.setStep("2");
tag.setBegin("0");
validateCounter(new String[]{"a", "c"});
}
public void testCounterWithNegativeStep() throws JspException {
tag.setBegin("8");
tag.setEnd("5");
tag.setStep("-1");
validateCounter(new Integer[]{8, 7, 6, 5});
}
public void testCounterWithListAndNegativeStep() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setStep("-1");
tag.setBegin("3");
tag.setEnd("1");
validateCounter(new String[]{"d", "c", "b"});
}
public void testCounterWithListAndNegativeStepNoEnd() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setStep("-1");
tag.setBegin("3");
validateCounter(new String[]{"d", "c", "b", "a"});
}
public void testCounterWithArrayAndNegativeStep() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setStep("-1");
tag.setBegin("3");
tag.setEnd("1");
validateCounter(new String[]{"d", "c", "b"});
}
public void testCounterWithArrayAndNegativeStepNoEnd() throws JspException {
Foo foo = new Foo();
ArrayList<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
foo.setList(list);
stack.push(foo);
tag.setValue("list");
tag.setStep("-1");
tag.setBegin("3");
validateCounter(new String[]{"d", "c", "b", "a"});
}
protected void validateCounter(Object[] expectedValues) throws JspException {
ArrayList<Object> values = new ArrayList<>();
try {
int result = tag.doStartTag();
assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
values.add(stack.getRoot().peek());
} catch (JspException e) {
fail(e.getMessage());
}
while (tag.doAfterBody() == TagSupport.EVAL_BODY_AGAIN) {
values.add(stack.getRoot().peek());
}
assertEquals(expectedValues.length, values.size());
assertTrue(ListUtils.isEqualList(Arrays.asList(expectedValues), values));
}
@Override
protected void setUp() throws Exception {
super.setUp();
// create the needed objects
tag = new IteratorTag();
MockBodyContent mockBodyContent = new TestMockBodyContent("", new MockJspWriter(new StringWriter()));
tag.setBodyContent(mockBodyContent);
// associate the tag with the mock page request
tag.setPageContext(pageContext);
}
private void iterateThreeStrings() {
int result = 0;
try {
result = tag.doStartTag();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_BODY_INCLUDE, result);
assertEquals("test1", stack.getRoot().peek());
assertEquals(4, stack.size());
try {
result = tag.doAfterBody();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals("test2", stack.getRoot().peek());
assertEquals(4, stack.size());
try {
result = tag.doAfterBody();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_BODY_AGAIN, result);
assertEquals("test3", stack.getRoot().peek());
assertEquals(4, stack.size());
try {
result = tag.doAfterBody();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.SKIP_BODY, result);
assertEquals(3, stack.size());
}
private void validateSkipBody() {
int result = 0;
try {
result = tag.doStartTag();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.SKIP_BODY, result);
try {
result = tag.doEndTag();
} catch (JspException e) {
fail(e.getMessage());
}
assertEquals(TagSupport.EVAL_PAGE, result);
// Basic sanity check of clearTagStateForTagPoolingServers() behaviour for Struts Tags after doEndTag().
IteratorTag freshTag = new IteratorTag();
freshTag.setPageContext(pageContext);
assertFalse("Tag state after doEndTag() under default tag clear state is equal to new Tag with pageContext/parent set. " +
"May indicate that clearTagStateForTagPoolingServers() calls are not working properly.",
strutsBodyTagsAreReflectionEqual(tag, freshTag));
}
static class Foo {
private Collection<String> list;
private Map<String, String> map;
private String[] array;
public void setArray(String[] array) {
this.array = array;
}
public String[] getArray() {
return array;
}
public void setList(Collection<String> list) {
this.list = list;
}
public Collection<String> getList() {
return list;
}
public void setMap(Map<String, String> map) {
this.map = map;
}
public Map<String, String> getMap() {
return map;
}
}
    /**
     * MockBodyContent bound to the enclosing test's mock response whose
     * getString() always yields a fixed non-empty marker, so the iterator
     * tag always sees body content to (re)evaluate.
     */
    class TestMockBodyContent extends MockBodyContent {
        public TestMockBodyContent(String content, Writer targetWriter) {
            // "response" comes from the enclosing test case (non-static inner class).
            super(content, response, targetWriter);
        }
        public String getString() {
            return ".-.";
        }
    }
}
|
googleapis/google-cloud-java | 36,120 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SummarizationVerbosityInput.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Input for summarization verbosity metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput}
*/
public final class SummarizationVerbosityInput extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput)
SummarizationVerbosityInputOrBuilder {
private static final long serialVersionUID = 0L;
  // Use SummarizationVerbosityInput.newBuilder() to construct.
  private SummarizationVerbosityInput(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Both message fields default to unset; nothing to initialize here.
  private SummarizationVerbosityInput() {}
  // Invoked by the protobuf runtime to allocate a fresh, empty instance.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SummarizationVerbosityInput();
  }
  // Message descriptor, resolved from the generated file descriptor holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_SummarizationVerbosityInput_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_SummarizationVerbosityInput_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput.class,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput.Builder.class);
  }
  // Presence bitmask: bit 0 = metric_spec, bit 1 = instance.
  private int bitField0_;
  public static final int METRIC_SPEC_FIELD_NUMBER = 1;
  private com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metricSpec_;
  /**
   *
   *
   * <pre>
   * Required. Spec for summarization verbosity score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the metricSpec field is set.
   */
  @java.lang.Override
  public boolean hasMetricSpec() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Spec for summarization verbosity score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The metricSpec.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec getMetricSpec() {
    // Falls back to the default instance when unset; never returns null.
    return metricSpec_ == null
        ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.getDefaultInstance()
        : metricSpec_;
  }
  /**
   *
   *
   * <pre>
   * Required. Spec for summarization verbosity score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpecOrBuilder
      getMetricSpecOrBuilder() {
    return metricSpec_ == null
        ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.getDefaultInstance()
        : metricSpec_;
  }
  public static final int INSTANCE_FIELD_NUMBER = 2;
  private com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance_;
  /**
   *
   *
   * <pre>
   * Required. Summarization verbosity instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the instance field is set.
   */
  @java.lang.Override
  public boolean hasInstance() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Summarization verbosity instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The instance.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance getInstance() {
    // Falls back to the default instance when unset; never returns null.
    return instance_ == null
        ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance.getDefaultInstance()
        : instance_;
  }
  /**
   *
   *
   * <pre>
   * Required. Summarization verbosity instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstanceOrBuilder
      getInstanceOrBuilder() {
    return instance_ == null
        ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance.getDefaultInstance()
        : instance_;
  }
  // Initialization check cache: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  // No proto2-required fields exist, so this always ends up reporting true.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Writes only the fields whose presence bit is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getMetricSpec());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getInstance());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and caches it in memoizedSize (-1 = not computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Presence-aware structural equality: field presence, field values and
  // unknown fields must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput other =
        (com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput) obj;
    if (hasMetricSpec() != other.hasMetricSpec()) return false;
    if (hasMetricSpec()) {
      if (!getMetricSpec().equals(other.getMetricSpec())) return false;
    }
    if (hasInstance() != other.hasInstance()) return false;
    if (hasInstance()) {
      if (!getInstance().equals(other.getInstance())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes the descriptor, each present field (tag number + value) and
  // unknown fields; the result is cached in memoizedHashCode (0 = not computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasMetricSpec()) {
      hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER;
      hash = (53 * hash) + getMetricSpec().hashCode();
    }
    if (hasInstance()) {
      hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
      hash = (53 * hash) + getInstance().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input form
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream, with and
  // without an extension registry). All delegate to PARSER or to the
  // GeneratedMessageV3 stream helpers.
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: new empty builder, builder pre-populated from a
  // prototype, and the toBuilder() round-trip used for copy-and-modify.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Input for summarization verbosity metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput)
com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInputOrBuilder {
    // Same descriptor/accessor-table plumbing as the enclosing message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_SummarizationVerbosityInput_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_SummarizationVerbosityInput_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput.class,
              com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the sub-message field builders when the runtime flag
    // alwaysUseFieldBuilders is set (used for nested-builder support).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getMetricSpecFieldBuilder();
        getInstanceFieldBuilder();
      }
    }
    // Resets both fields to unset and disposes any live sub-builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      metricSpec_ = null;
      if (metricSpecBuilder_ != null) {
        metricSpecBuilder_.dispose();
        metricSpecBuilder_ = null;
      }
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_SummarizationVerbosityInput_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput.getDefaultInstance();
    }
    // build() = buildPartial() plus an initialization check (always passes here,
    // since isInitialized() is unconditionally true for this message).
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput build() {
      com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput buildPartial() {
      com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput result =
          new com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each set field (from its builder if one exists, else the raw
    // field) into the result and transfers the matching presence bits.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflective field-descriptor mutators: straight delegations to the
    // GeneratedMessageV3.Builder superclass (generated boilerplate).
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic-dispatch merge: routes to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: recursively merges each set sub-message and the unknown-field set.
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput other) {
      if (other
          == com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput.getDefaultInstance())
        return this;
      if (other.hasMetricSpec()) {
        mergeMetricSpec(other.getMetricSpec());
      }
      if (other.hasInstance()) {
        mergeInstance(other.getInstance());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    // No required fields at this level (proto3 semantics), so always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge. Tag 10 = field 1 (metric_spec, length-delimited);
    // tag 18 = field 2 (instance, length-delimited); unknown tags are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // onChanged() runs even on failure so partially-read state is reported to parents.
        onChanged();
      } // finally
      return this;
    }
    // Presence bits for this builder: 0x1 = metric_spec, 0x2 = instance.
    private int bitField0_;

    // Cached message value for metric_spec; superseded by metricSpecBuilder_ once a
    // nested builder has been requested.
    private com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metricSpec_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.Builder,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpecOrBuilder>
        metricSpecBuilder_;

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the metricSpec field is set.
     */
    public boolean hasMetricSpec() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The metricSpec.
     */
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec getMetricSpec() {
      if (metricSpecBuilder_ == null) {
        return metricSpec_ == null
            ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.getDefaultInstance()
            : metricSpec_;
      } else {
        return metricSpecBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setMetricSpec(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec value) {
      if (metricSpecBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        metricSpec_ = value;
      } else {
        metricSpecBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setMetricSpec(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.Builder builderForValue) {
      if (metricSpecBuilder_ == null) {
        metricSpec_ = builderForValue.build();
      } else {
        metricSpecBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeMetricSpec(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec value) {
      if (metricSpecBuilder_ == null) {
        // Only merge into an existing non-default value; otherwise overwrite.
        if (((bitField0_ & 0x00000001) != 0)
            && metricSpec_ != null
            && metricSpec_
                != com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec
                    .getDefaultInstance()) {
          getMetricSpecBuilder().mergeFrom(value);
        } else {
          metricSpec_ = value;
        }
      } else {
        metricSpecBuilder_.mergeFrom(value);
      }
      if (metricSpec_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearMetricSpec() {
      bitField0_ = (bitField0_ & ~0x00000001);
      metricSpec_ = null;
      if (metricSpecBuilder_ != null) {
        metricSpecBuilder_.dispose();
        metricSpecBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.Builder
        getMetricSpecBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getMetricSpecFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpecOrBuilder
        getMetricSpecOrBuilder() {
      if (metricSpecBuilder_ != null) {
        return metricSpecBuilder_.getMessageOrBuilder();
      } else {
        return metricSpec_ == null
            ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.getDefaultInstance()
            : metricSpec_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for summarization verbosity score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.Builder,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpecOrBuilder>
        getMetricSpecFieldBuilder() {
      // Lazily creates the nested builder; from then on metricSpec_ is owned by it.
      if (metricSpecBuilder_ == null) {
        metricSpecBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec,
                com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpec.Builder,
                com.google.cloud.aiplatform.v1beta1.SummarizationVerbositySpecOrBuilder>(
                getMetricSpec(), getParentForChildren(), isClean());
        metricSpec_ = null;
      }
      return metricSpecBuilder_;
    }
    // Cached message value for instance (field 2); superseded by instanceBuilder_
    // once a nested builder has been requested.
    private com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance.Builder,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstanceOrBuilder>
        instanceBuilder_;

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the instance field is set.
     */
    public boolean hasInstance() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The instance.
     */
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance getInstance() {
      if (instanceBuilder_ == null) {
        return instance_ == null
            ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance
                .getDefaultInstance()
            : instance_;
      } else {
        return instanceBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance value) {
      if (instanceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        instance_ = value;
      } else {
        instanceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance.Builder
            builderForValue) {
      if (instanceBuilder_ == null) {
        instance_ = builderForValue.build();
      } else {
        instanceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeInstance(
        com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance value) {
      if (instanceBuilder_ == null) {
        // Only merge into an existing non-default value; otherwise overwrite.
        if (((bitField0_ & 0x00000002) != 0)
            && instance_ != null
            && instance_
                != com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance
                    .getDefaultInstance()) {
          getInstanceBuilder().mergeFrom(value);
        } else {
          instance_ = value;
        }
      } else {
        instanceBuilder_.mergeFrom(value);
      }
      if (instance_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearInstance() {
      bitField0_ = (bitField0_ & ~0x00000002);
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance.Builder
        getInstanceBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getInstanceFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstanceOrBuilder
        getInstanceOrBuilder() {
      if (instanceBuilder_ != null) {
        return instanceBuilder_.getMessageOrBuilder();
      } else {
        return instance_ == null
            ? com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance
                .getDefaultInstance()
            : instance_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Summarization verbosity instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance.Builder,
            com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstanceOrBuilder>
        getInstanceFieldBuilder() {
      // Lazily creates the nested builder; from then on instance_ is owned by it.
      if (instanceBuilder_ == null) {
        instanceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance,
                com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstance.Builder,
                com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInstanceOrBuilder>(
                getInstance(), getParentForChildren(), isClean());
        instance_ = null;
      }
      return instanceBuilder_;
    }
    // Unknown-field passthroughs: preserve fields this binary does not recognize.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput)
  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput();
  }

  public static com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and attaches the partial message
  // to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<SummarizationVerbosityInput> PARSER =
      new com.google.protobuf.AbstractParser<SummarizationVerbosityInput>() {
        @java.lang.Override
        public SummarizationVerbosityInput parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SummarizationVerbosityInput> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SummarizationVerbosityInput> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* Container for either a built-in LB policy supported by gRPC or Envoy or a custom one implemented by the end user.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig}
*/
public final class BackendServiceLocalityLoadBalancingPolicyConfig
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig)
BackendServiceLocalityLoadBalancingPolicyConfigOrBuilder {
private static final long serialVersionUID = 0L;
  // NOTE(review): generated protobuf code (DO NOT EDIT) — comments added for review only.
  // Use BackendServiceLocalityLoadBalancingPolicyConfig.newBuilder() to construct.
  private BackendServiceLocalityLoadBalancingPolicyConfig(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BackendServiceLocalityLoadBalancingPolicyConfig() {}

  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BackendServiceLocalityLoadBalancingPolicyConfig();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig.class,
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig.Builder
                .class);
  }
  // Presence bits for optional fields: 0x1 = custom_policy, 0x2 = policy.
  private int bitField0_;

  public static final int CUSTOM_POLICY_FIELD_NUMBER = 4818368;
  private com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
      customPolicy_;

  /**
   * <code>
   * optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
   * </code>
   *
   * @return Whether the customPolicy field is set.
   */
  @java.lang.Override
  public boolean hasCustomPolicy() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * <code>
   * optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
   * </code>
   *
   * @return The customPolicy.
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
      getCustomPolicy() {
    // Never returns null: falls back to the default instance when unset.
    return customPolicy_ == null
        ? com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
            .getDefaultInstance()
        : customPolicy_;
  }

  /**
   * <code>
   * optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1
          .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicyOrBuilder
      getCustomPolicyOrBuilder() {
    return customPolicy_ == null
        ? com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
            .getDefaultInstance()
        : customPolicy_;
  }
  public static final int POLICY_FIELD_NUMBER = 91071794;
  private com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy_;

  /**
   * <code>
   * optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
   * </code>
   *
   * @return Whether the policy field is set.
   */
  @java.lang.Override
  public boolean hasPolicy() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   * <code>
   * optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
   * </code>
   *
   * @return The policy.
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
      getPolicy() {
    // Never returns null: falls back to the default instance when unset.
    return policy_ == null
        ? com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
            .getDefaultInstance()
        : policy_;
  }

  /**
   * <code>
   * optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicyOrBuilder
      getPolicyOrBuilder() {
    return policy_ == null
        ? com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
            .getDefaultInstance()
        : policy_;
  }
  // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only fields whose presence bit is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(4818368, getCustomPolicy());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(91071794, getPolicy());
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size (memoizedSize == -1 means stale).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4818368, getCustomPolicy());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(91071794, getPolicy());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: compares field presence, field values, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig other =
        (com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig) obj;

    if (hasCustomPolicy() != other.hasCustomPolicy()) return false;
    if (hasCustomPolicy()) {
      if (!getCustomPolicy().equals(other.getCustomPolicy())) return false;
    }
    if (hasPolicy() != other.hasPolicy()) return false;
    if (hasPolicy()) {
      if (!getPolicy().equals(other.getPolicy())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); memoized since the message is immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasCustomPolicy()) {
      hash = (37 * hash) + CUSTOM_POLICY_FIELD_NUMBER;
      hash = (53 * hash) + getCustomPolicy().hashCode();
    }
    if (hasPolicy()) {
      hash = (37 * hash) + POLICY_FIELD_NUMBER;
      hash = (53 * hash) + getPolicy().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points, all delegating to PARSER or the
  // GeneratedMessageV3 IO helpers for the various input representations.
  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length prefix before the message body.
  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: new empty builder, builder pre-populated from a prototype,
  // and toBuilder() for round-tripping an existing message.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoids an unnecessary merge when called on the default instance itself.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Container for either a built-in LB policy supported by gRPC or Envoy or a custom one implemented by the end user.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig)
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig.class,
              com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested field builders when the runtime requires it
    // (alwaysUseFieldBuilders is set in some runtime configurations).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getCustomPolicyFieldBuilder();
        getPolicyFieldBuilder();
      }
    }
    // Resets all fields and presence bits, disposing any nested field builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      customPolicy_ = null;
      if (customPolicyBuilder_ != null) {
        customPolicyBuilder_.dispose();
        customPolicyBuilder_ = null;
      }
      policy_ = null;
      if (policyBuilder_ != null) {
        policyBuilder_.dispose();
        policyBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfig_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
          .getDefaultInstance();
    }

    // Builds the message; throws if the partial result reports itself uninitialized.
    @java.lang.Override
    public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig build() {
      com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds the message without initialization checks.
    @java.lang.Override
    public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
        buildPartial() {
      com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig result =
          new com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into result: bit 0x1 = custom_policy, bit 0x2 = policy.
    private void buildPartial0(
        com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.customPolicy_ =
            customPolicyBuilder_ == null ? customPolicy_ : customPolicyBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.policy_ = policyBuilder_ == null ? policy_ : policyBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflective field-descriptor mutators: straight delegations to the
    // GeneratedMessageV3.Builder superclass (generated boilerplate).
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig) {
return mergeFrom(
(com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig other) {
if (other
== com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
.getDefaultInstance()) return this;
if (other.hasCustomPolicy()) {
mergeCustomPolicy(other.getCustomPolicy());
}
if (other.hasPolicy()) {
mergePolicy(other.getPolicy());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
    @java.lang.Override
    public final boolean isInitialized() {
      // Always considered initialized; the generator emitted no required-field checks.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 38546946:
              {
                // custom_policy (field 4818368, wire type 2): 4818368 << 3 | 2.
                input.readMessage(getCustomPolicyFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 38546946
            case 728574354:
              {
                // policy (field 91071794, wire type 2): 91071794 << 3 | 2.
                input.readMessage(getPolicyFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 728574354
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on error, since fields may have been touched.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x1 marks custom_policy as set, bit 0x2 marks policy as set.
    private int bitField0_;

    private com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
        customPolicy_;
    // Lazily created in getCustomPolicyFieldBuilder(); once non-null it owns the
    // field value and customPolicy_ is kept null.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy,
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
                .Builder,
            com.google.cloud.compute.v1
                .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicyOrBuilder>
        customPolicyBuilder_;
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*
* @return Whether the customPolicy field is set.
*/
    public boolean hasCustomPolicy() {
      // Presence is tracked by bit 0x1 of bitField0_.
      return ((bitField0_ & 0x00000001) != 0);
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*
* @return The customPolicy.
*/
    public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
        getCustomPolicy() {
      // Read from the nested builder when one exists; otherwise fall back to the
      // stored message, substituting the default instance for null.
      if (customPolicyBuilder_ == null) {
        return customPolicy_ == null
            ? com.google.cloud.compute.v1
                .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy.getDefaultInstance()
            : customPolicy_;
      } else {
        return customPolicyBuilder_.getMessage();
      }
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*/
    public Builder setCustomPolicy(
        com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
            value) {
      // Rejects null; marks the field as set via bit 0x1.
      if (customPolicyBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        customPolicy_ = value;
      } else {
        customPolicyBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*/
    public Builder setCustomPolicy(
        com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
                .Builder
            builderForValue) {
      // Builder variant of setCustomPolicy: the builder is built eagerly here.
      if (customPolicyBuilder_ == null) {
        customPolicy_ = builderForValue.build();
      } else {
        customPolicyBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*/
    public Builder mergeCustomPolicy(
        com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
            value) {
      if (customPolicyBuilder_ == null) {
        // Merge into the existing value only when one was already set and is not
        // the default instance; otherwise replace wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && customPolicy_ != null
            && customPolicy_
                != com.google.cloud.compute.v1
                    .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
                    .getDefaultInstance()) {
          getCustomPolicyBuilder().mergeFrom(value);
        } else {
          customPolicy_ = value;
        }
      } else {
        customPolicyBuilder_.mergeFrom(value);
      }
      if (customPolicy_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*/
    public Builder clearCustomPolicy() {
      // Clear the presence bit, drop the value, and dispose of any nested builder.
      bitField0_ = (bitField0_ & ~0x00000001);
      customPolicy_ = null;
      if (customPolicyBuilder_ != null) {
        customPolicyBuilder_.dispose();
        customPolicyBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*/
    public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
            .Builder
        getCustomPolicyBuilder() {
      // Marks the field as set and forces creation of the nested builder.
      bitField0_ |= 0x00000001;
      onChanged();
      return getCustomPolicyFieldBuilder().getBuilder();
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*/
    public com.google.cloud.compute.v1
            .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicyOrBuilder
        getCustomPolicyOrBuilder() {
      // Read-only view; does not force builder creation or mark the field set.
      if (customPolicyBuilder_ != null) {
        return customPolicyBuilder_.getMessageOrBuilder();
      } else {
        return customPolicy_ == null
            ? com.google.cloud.compute.v1
                .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy.getDefaultInstance()
            : customPolicy_;
      }
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy custom_policy = 4818368;
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy,
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy
                .Builder,
            com.google.cloud.compute.v1
                .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicyOrBuilder>
        getCustomPolicyFieldBuilder() {
      // Lazily create the nested builder, seeding it with the current value; from
      // then on the builder owns the field and customPolicy_ stays null.
      if (customPolicyBuilder_ == null) {
        customPolicyBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.compute.v1
                    .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy,
                com.google.cloud.compute.v1
                    .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy.Builder,
                com.google.cloud.compute.v1
                    .BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicyOrBuilder>(
                getCustomPolicy(), getParentForChildren(), isClean());
        customPolicy_ = null;
      }
      return customPolicyBuilder_;
    }
    private com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
        policy_;
    // Lazily created in getPolicyFieldBuilder(); once non-null it owns the field
    // value and policy_ is kept null.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy,
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
                .Builder,
            com.google.cloud.compute.v1
                .BackendServiceLocalityLoadBalancingPolicyConfigPolicyOrBuilder>
        policyBuilder_;
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*
* @return Whether the policy field is set.
*/
    public boolean hasPolicy() {
      // Presence is tracked by bit 0x2 of bitField0_.
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*
* @return The policy.
*/
    public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
        getPolicy() {
      // Read from the nested builder when one exists; otherwise fall back to the
      // stored message, substituting the default instance for null.
      if (policyBuilder_ == null) {
        return policy_ == null
            ? com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
                .getDefaultInstance()
            : policy_;
      } else {
        return policyBuilder_.getMessage();
      }
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*/
    public Builder setPolicy(
        com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy value) {
      // Rejects null; marks the field as set via bit 0x2.
      if (policyBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        policy_ = value;
      } else {
        policyBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*/
    public Builder setPolicy(
        com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy.Builder
            builderForValue) {
      // Builder variant of setPolicy: the builder is built eagerly here.
      if (policyBuilder_ == null) {
        policy_ = builderForValue.build();
      } else {
        policyBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*/
    public Builder mergePolicy(
        com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy value) {
      if (policyBuilder_ == null) {
        // Merge into the existing value only when one was already set and is not
        // the default instance; otherwise replace wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && policy_ != null
            && policy_
                != com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
                    .getDefaultInstance()) {
          getPolicyBuilder().mergeFrom(value);
        } else {
          policy_ = value;
        }
      } else {
        policyBuilder_.mergeFrom(value);
      }
      if (policy_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*/
    public Builder clearPolicy() {
      // Clear the presence bit, drop the value, and dispose of any nested builder.
      bitField0_ = (bitField0_ & ~0x00000002);
      policy_ = null;
      if (policyBuilder_ != null) {
        policyBuilder_.dispose();
        policyBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*/
    public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy.Builder
        getPolicyBuilder() {
      // Marks the field as set and forces creation of the nested builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return getPolicyFieldBuilder().getBuilder();
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*/
    public com.google.cloud.compute.v1
            .BackendServiceLocalityLoadBalancingPolicyConfigPolicyOrBuilder
        getPolicyOrBuilder() {
      // Read-only view; does not force builder creation or mark the field set.
      if (policyBuilder_ != null) {
        return policyBuilder_.getMessageOrBuilder();
      } else {
        return policy_ == null
            ? com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
                .getDefaultInstance()
            : policy_;
      }
    }
/**
* <code>
* optional .google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy policy = 91071794;
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy,
            com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
                .Builder,
            com.google.cloud.compute.v1
                .BackendServiceLocalityLoadBalancingPolicyConfigPolicyOrBuilder>
        getPolicyFieldBuilder() {
      // Lazily create the nested builder, seeding it with the current value; from
      // then on the builder owns the field and policy_ stays null.
      if (policyBuilder_ == null) {
        policyBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy,
                com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
                    .Builder,
                com.google.cloud.compute.v1
                    .BackendServiceLocalityLoadBalancingPolicyConfigPolicyOrBuilder>(
                getPolicy(), getParentForChildren(), isClean());
        policy_ = null;
      }
      return policyBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates unknown-field replacement to the base class.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates unknown-field merge to the base class.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig)
  // Shared immutable default instance, returned by getDefaultInstance() and
  // getDefaultInstanceForType().
  private static final com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig();
  }
  // Returns the singleton default (all-fields-unset) instance.
  public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On failure it attaches the partially built message to the
  // thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<BackendServiceLocalityLoadBalancingPolicyConfig>
      PARSER =
          new com.google.protobuf.AbstractParser<
              BackendServiceLocalityLoadBalancingPolicyConfig>() {
            @java.lang.Override
            public BackendServiceLocalityLoadBalancingPolicyConfig parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<BackendServiceLocalityLoadBalancingPolicyConfig>
      parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<BackendServiceLocalityLoadBalancingPolicyConfig>
      getParserForType() {
    // Instance-level accessor for the same shared parser.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfig
      getDefaultInstanceForType() {
    // Instance-level accessor for the shared default instance.
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.confidentialcomputing.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service describing handlers for resources
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/confidentialcomputing/v1/service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ConfidentialComputingGrpc {
  // Static holder of method descriptors and stub factories; not instantiable.
  private ConfidentialComputingGrpc() {}

  // Fully-qualified gRPC service name used to build the method descriptors below.
  public static final java.lang.String SERVICE_NAME =
      "google.cloud.confidentialcomputing.v1.ConfidentialComputing";
// Static method descriptors that strictly reflect the proto.
  // Cached descriptor; volatile for the double-checked lazy initialization in
  // getCreateChallengeMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest,
          com.google.cloud.confidentialcomputing.v1.Challenge>
      getCreateChallengeMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateChallenge",
      requestType = com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest.class,
      responseType = com.google.cloud.confidentialcomputing.v1.Challenge.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest,
          com.google.cloud.confidentialcomputing.v1.Challenge>
      getCreateChallengeMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest,
            com.google.cloud.confidentialcomputing.v1.Challenge>
        getCreateChallengeMethod;
    // Double-checked locking on the class object; the cached field is volatile.
    if ((getCreateChallengeMethod = ConfidentialComputingGrpc.getCreateChallengeMethod) == null) {
      synchronized (ConfidentialComputingGrpc.class) {
        if ((getCreateChallengeMethod = ConfidentialComputingGrpc.getCreateChallengeMethod)
            == null) {
          ConfidentialComputingGrpc.getCreateChallengeMethod =
              getCreateChallengeMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest,
                          com.google.cloud.confidentialcomputing.v1.Challenge>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateChallenge"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1.Challenge
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ConfidentialComputingMethodDescriptorSupplier("CreateChallenge"))
                      .build();
        }
      }
    }
    return getCreateChallengeMethod;
  }
  // Cached descriptor; volatile for the double-checked lazy initialization in
  // getVerifyAttestationMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest,
          com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>
      getVerifyAttestationMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "VerifyAttestation",
      requestType = com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest.class,
      responseType = com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest,
          com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>
      getVerifyAttestationMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest,
            com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>
        getVerifyAttestationMethod;
    // Double-checked locking on the class object; the cached field is volatile.
    if ((getVerifyAttestationMethod = ConfidentialComputingGrpc.getVerifyAttestationMethod)
        == null) {
      synchronized (ConfidentialComputingGrpc.class) {
        if ((getVerifyAttestationMethod = ConfidentialComputingGrpc.getVerifyAttestationMethod)
            == null) {
          ConfidentialComputingGrpc.getVerifyAttestationMethod =
              getVerifyAttestationMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest,
                          com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "VerifyAttestation"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ConfidentialComputingMethodDescriptorSupplier("VerifyAttestation"))
                      .build();
        }
      }
    }
    return getVerifyAttestationMethod;
  }
  // Cached descriptor; volatile for the double-checked lazy initialization in
  // getVerifyConfidentialSpaceMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest,
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>
      getVerifyConfidentialSpaceMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "VerifyConfidentialSpace",
      requestType = com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest.class,
      responseType =
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest,
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>
      getVerifyConfidentialSpaceMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest,
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>
        getVerifyConfidentialSpaceMethod;
    // Double-checked locking on the class object; the cached field is volatile.
    if ((getVerifyConfidentialSpaceMethod =
            ConfidentialComputingGrpc.getVerifyConfidentialSpaceMethod)
        == null) {
      synchronized (ConfidentialComputingGrpc.class) {
        if ((getVerifyConfidentialSpaceMethod =
                ConfidentialComputingGrpc.getVerifyConfidentialSpaceMethod)
            == null) {
          ConfidentialComputingGrpc.getVerifyConfidentialSpaceMethod =
              getVerifyConfidentialSpaceMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest,
                          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "VerifyConfidentialSpace"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1
                                  .VerifyConfidentialSpaceRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1
                                  .VerifyConfidentialSpaceResponse.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ConfidentialComputingMethodDescriptorSupplier(
                              "VerifyConfidentialSpace"))
                      .build();
        }
      }
    }
    return getVerifyConfidentialSpaceMethod;
  }
  // Cached descriptor; volatile for the double-checked lazy initialization in
  // getVerifyConfidentialGkeMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest,
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>
      getVerifyConfidentialGkeMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "VerifyConfidentialGke",
      requestType = com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest.class,
      responseType = com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest,
          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>
      getVerifyConfidentialGkeMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest,
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>
        getVerifyConfidentialGkeMethod;
    // Double-checked locking on the class object; the cached field is volatile.
    if ((getVerifyConfidentialGkeMethod = ConfidentialComputingGrpc.getVerifyConfidentialGkeMethod)
        == null) {
      synchronized (ConfidentialComputingGrpc.class) {
        if ((getVerifyConfidentialGkeMethod =
                ConfidentialComputingGrpc.getVerifyConfidentialGkeMethod)
            == null) {
          ConfidentialComputingGrpc.getVerifyConfidentialGkeMethod =
              getVerifyConfidentialGkeMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest,
                          com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "VerifyConfidentialGke"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.confidentialcomputing.v1
                                  .VerifyConfidentialGkeResponse.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ConfidentialComputingMethodDescriptorSupplier(
                              "VerifyConfidentialGke"))
                      .build();
        }
      }
    }
    return getVerifyConfidentialGkeMethod;
  }
/** Creates a new async stub that supports all call types for the service */
  public static ConfidentialComputingStub newStub(io.grpc.Channel channel) {
    // Factory passed to AbstractStub so call options can be rebuilt per call.
    io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingStub>() {
          @java.lang.Override
          public ConfidentialComputingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ConfidentialComputingStub(channel, callOptions);
          }
        };
    return ConfidentialComputingStub.newStub(factory, channel);
  }
/** Creates a new blocking-style stub that supports all types of calls on the service */
  public static ConfidentialComputingBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    // Factory passed to AbstractStub so call options can be rebuilt per call.
    io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingBlockingV2Stub>() {
          @java.lang.Override
          public ConfidentialComputingBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ConfidentialComputingBlockingV2Stub(channel, callOptions);
          }
        };
    return ConfidentialComputingBlockingV2Stub.newStub(factory, channel);
  }
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
  public static ConfidentialComputingBlockingStub newBlockingStub(io.grpc.Channel channel) {
    // Factory passed to AbstractStub so call options can be rebuilt per call.
    io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingBlockingStub>() {
          @java.lang.Override
          public ConfidentialComputingBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ConfidentialComputingBlockingStub(channel, callOptions);
          }
        };
    return ConfidentialComputingBlockingStub.newStub(factory, channel);
  }
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static ConfidentialComputingFutureStub newFutureStub(io.grpc.Channel channel) {
    // Factory passed to AbstractStub so call options can be rebuilt per call.
    io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ConfidentialComputingFutureStub>() {
          @java.lang.Override
          public ConfidentialComputingFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ConfidentialComputingFutureStub(channel, callOptions);
          }
        };
    return ConfidentialComputingFutureStub.newStub(factory, channel);
  }
/**
*
*
* <pre>
* Service describing handlers for resources
* </pre>
*/
  public interface AsyncService {
    // Each default implementation rejects the call with UNIMPLEMENTED until a
    // server overrides it.

    /**
     *
     *
     * <pre>
     * Creates a new Challenge in a given project and location.
     * </pre>
     */
    default void createChallenge(
        com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.confidentialcomputing.v1.Challenge>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateChallengeMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided attestation info, returning a signed attestation
     * token.
     * </pre>
     */
    default void verifyAttestation(
        com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getVerifyAttestationMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Verifies whether the provided attestation info is valid, returning a signed
     * attestation token if so.
     * </pre>
     */
    default void verifyConfidentialSpace(
        com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getVerifyConfidentialSpaceMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided Confidential GKE attestation info, returning a signed
     * OIDC token.
     * </pre>
     */
    default void verifyConfidentialGke(
        com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getVerifyConfidentialGkeMethod(), responseObserver);
    }
  }
/**
* Base class for the server implementation of the service ConfidentialComputing.
*
* <pre>
* Service describing handlers for resources
* </pre>
*/
  public abstract static class ConfidentialComputingImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      // Binds the AsyncService handlers into a server service definition.
      return ConfidentialComputingGrpc.bindService(this);
    }
  }
/**
* A stub to allow clients to do asynchronous rpc calls to service ConfidentialComputing.
*
* <pre>
* Service describing handlers for resources
* </pre>
*/
  public static final class ConfidentialComputingStub
      extends io.grpc.stub.AbstractAsyncStub<ConfidentialComputingStub> {
    // Async client stub: each method issues a non-blocking unary call and
    // delivers the result through the supplied StreamObserver.

    private ConfidentialComputingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ConfidentialComputingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ConfidentialComputingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new Challenge in a given project and location.
     * </pre>
     */
    public void createChallenge(
        com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.confidentialcomputing.v1.Challenge>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateChallengeMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided attestation info, returning a signed attestation
     * token.
     * </pre>
     */
    public void verifyAttestation(
        com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getVerifyAttestationMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Verifies whether the provided attestation info is valid, returning a signed
     * attestation token if so.
     * </pre>
     */
    public void verifyConfidentialSpace(
        com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getVerifyConfidentialSpaceMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided Confidential GKE attestation info, returning a signed
     * OIDC token.
     * </pre>
     */
    public void verifyConfidentialGke(
        com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getVerifyConfidentialGkeMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
/**
* A stub to allow clients to do synchronous rpc calls to service ConfidentialComputing.
*
* <pre>
* Service describing handlers for resources
* </pre>
*/
  public static final class ConfidentialComputingBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<ConfidentialComputingBlockingV2Stub> {
    // Blocking client stub: each method performs a synchronous unary call and
    // returns the response directly.

    private ConfidentialComputingBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ConfidentialComputingBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ConfidentialComputingBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new Challenge in a given project and location.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.Challenge createChallenge(
        com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateChallengeMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided attestation info, returning a signed attestation
     * token.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse verifyAttestation(
        com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getVerifyAttestationMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies whether the provided attestation info is valid, returning a signed
     * attestation token if so.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse
        verifyConfidentialSpace(
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getVerifyConfidentialSpaceMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided Confidential GKE attestation info, returning a signed
     * OIDC token.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse
        verifyConfidentialGke(
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getVerifyConfidentialGkeMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service ConfidentialComputing.
   *
   * <pre>
   * Service describing handlers for resources
   * </pre>
   */
  public static final class ConfidentialComputingBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<ConfidentialComputingBlockingStub> {

    private ConfidentialComputingBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Returns a new stub instance bound to the given channel and call options.
    @java.lang.Override
    protected ConfidentialComputingBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ConfidentialComputingBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new Challenge in a given project and location.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.Challenge createChallenge(
        com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest request) {
      // Blocking unary call; returns the server's response directly.
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateChallengeMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided attestation info, returning a signed attestation
     * token.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse verifyAttestation(
        com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getVerifyAttestationMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies whether the provided attestation info is valid, returning a signed
     * attestation token if so.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse
        verifyConfidentialSpace(
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getVerifyConfidentialSpaceMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided Confidential GKE attestation info, returning a signed
     * OIDC token.
     * </pre>
     */
    public com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse
        verifyConfidentialGke(
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getVerifyConfidentialGkeMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service
   * ConfidentialComputing.
   *
   * <pre>
   * Service describing handlers for resources
   * </pre>
   */
  public static final class ConfidentialComputingFutureStub
      extends io.grpc.stub.AbstractFutureStub<ConfidentialComputingFutureStub> {

    private ConfidentialComputingFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    // Returns a new stub instance bound to the given channel and call options.
    @java.lang.Override
    protected ConfidentialComputingFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ConfidentialComputingFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Creates a new Challenge in a given project and location.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.confidentialcomputing.v1.Challenge>
        createChallenge(com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest request) {
      // Non-blocking: returns a future that completes with the response.
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateChallengeMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided attestation info, returning a signed attestation
     * token.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>
        verifyAttestation(
            com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getVerifyAttestationMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies whether the provided attestation info is valid, returning a signed
     * attestation token if so.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>
        verifyConfidentialSpace(
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getVerifyConfidentialSpaceMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Verifies the provided Confidential GKE attestation info, returning a signed
     * OIDC token.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>
        verifyConfidentialGke(
            com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getVerifyConfidentialGkeMethod(), getCallOptions()), request);
    }
  }
  // Numeric ids used by MethodHandlers to dispatch incoming server calls.
  private static final int METHODID_CREATE_CHALLENGE = 0;
  private static final int METHODID_VERIFY_ATTESTATION = 1;
  private static final int METHODID_VERIFY_CONFIDENTIAL_SPACE = 2;
  private static final int METHODID_VERIFY_CONFIDENTIAL_GKE = 3;

  // Routes a server-side call to the matching AsyncService method based on methodId.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_CREATE_CHALLENGE:
          serviceImpl.createChallenge(
              (com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.confidentialcomputing.v1.Challenge>)
                  responseObserver);
          break;
        case METHODID_VERIFY_ATTESTATION:
          serviceImpl.verifyAttestation(
              (com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>)
                  responseObserver);
          break;
        case METHODID_VERIFY_CONFIDENTIAL_SPACE:
          serviceImpl.verifyConfidentialSpace(
              (com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>)
                  responseObserver);
          break;
        case METHODID_VERIFY_CONFIDENTIAL_GKE:
          serviceImpl.verifyConfidentialGke(
              (com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>)
                  responseObserver);
          break;
        default:
          // Unknown method id is a programming error in the generated code.
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          // This service defines no client- or bidi-streaming methods.
          throw new AssertionError();
      }
    }
  }
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    // Registers every RPC of this service as an async unary handler that dispatches
    // through MethodHandlers to the given AsyncService implementation.
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getCreateChallengeMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.confidentialcomputing.v1.CreateChallengeRequest,
                    com.google.cloud.confidentialcomputing.v1.Challenge>(
                    service, METHODID_CREATE_CHALLENGE)))
        .addMethod(
            getVerifyAttestationMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.confidentialcomputing.v1.VerifyAttestationRequest,
                    com.google.cloud.confidentialcomputing.v1.VerifyAttestationResponse>(
                    service, METHODID_VERIFY_ATTESTATION)))
        .addMethod(
            getVerifyConfidentialSpaceMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceRequest,
                    com.google.cloud.confidentialcomputing.v1.VerifyConfidentialSpaceResponse>(
                    service, METHODID_VERIFY_CONFIDENTIAL_SPACE)))
        .addMethod(
            getVerifyConfidentialGkeMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeRequest,
                    com.google.cloud.confidentialcomputing.v1.VerifyConfidentialGkeResponse>(
                    service, METHODID_VERIFY_CONFIDENTIAL_GKE)))
        .build();
  }
  // Supplies proto descriptors for reflection; shared base for the file- and
  // method-level suppliers below.
  private abstract static class ConfidentialComputingBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    ConfidentialComputingBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      // Descriptor of the proto file that defines this service.
      return com.google.cloud.confidentialcomputing.v1.ServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("ConfidentialComputing");
    }
  }
  // File-level descriptor supplier; all behavior is inherited from the base class.
  private static final class ConfidentialComputingFileDescriptorSupplier
      extends ConfidentialComputingBaseDescriptorSupplier {
    ConfidentialComputingFileDescriptorSupplier() {}
  }
  // Adds per-method descriptor lookup on top of the base supplier.
  private static final class ConfidentialComputingMethodDescriptorSupplier
      extends ConfidentialComputingBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    ConfidentialComputingMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    // Lazily builds the descriptor using double-checked locking on the volatile
    // serviceDescriptor field; subsequent calls return the cached instance.
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (ConfidentialComputingGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new ConfidentialComputingFileDescriptorSupplier())
                      .addMethod(getCreateChallengeMethod())
                      .addMethod(getVerifyAttestationMethod())
                      .addMethod(getVerifyConfidentialSpaceMethod())
                      .addMethod(getVerifyConfidentialGkeMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seatunnel.connectors.seatunnel.jdbc.utils;
import org.apache.seatunnel.api.configuration.ReadonlyConfig;
import org.apache.seatunnel.api.options.ConnectorCommonOptions;
import org.apache.seatunnel.api.table.catalog.Catalog;
import org.apache.seatunnel.api.table.catalog.CatalogTable;
import org.apache.seatunnel.api.table.catalog.Column;
import org.apache.seatunnel.api.table.catalog.ConstraintKey;
import org.apache.seatunnel.api.table.catalog.PhysicalColumn;
import org.apache.seatunnel.api.table.catalog.PrimaryKey;
import org.apache.seatunnel.api.table.catalog.TableIdentifier;
import org.apache.seatunnel.api.table.catalog.TablePath;
import org.apache.seatunnel.api.table.catalog.TableSchema;
import org.apache.seatunnel.api.table.catalog.exception.CatalogException;
import org.apache.seatunnel.api.table.catalog.exception.DatabaseAlreadyExistException;
import org.apache.seatunnel.api.table.catalog.exception.DatabaseNotExistException;
import org.apache.seatunnel.api.table.catalog.exception.TableAlreadyExistException;
import org.apache.seatunnel.api.table.catalog.exception.TableNotExistException;
import org.apache.seatunnel.api.table.type.BasicType;
import org.apache.seatunnel.api.table.type.DecimalType;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
public class JdbcCatalogUtilsTest {
    // Shared catalog-side fixture for the merge tests.
    // Schema: f1 BIGINT (source type "int unsigned", primary key "pk1"),
    //         f2 VARCHAR(10), f3 VARCHAR(20); unique key "uk1" on (f2 ASC, f3 ASC);
    // partition key: f2. All columns are non-nullable and carry comments/source types,
    // i.e. the metadata a real catalog would report but a plain query result would not.
    private static final CatalogTable DEFAULT_TABLE =
            CatalogTable.of(
                    TableIdentifier.of("mysql-1", "database-x", null, "table-x"),
                    TableSchema.builder()
                            .column(
                                    PhysicalColumn.of(
                                            "f1",
                                            BasicType.LONG_TYPE,
                                            null,
                                            false,
                                            null,
                                            "f1 comment",
                                            "int unsigned",
                                            false,
                                            false,
                                            null,
                                            null,
                                            null))
                            .column(
                                    PhysicalColumn.of(
                                            "f2",
                                            BasicType.STRING_TYPE,
                                            10,
                                            false,
                                            null,
                                            "f2 comment",
                                            "varchar(10)",
                                            false,
                                            false,
                                            null,
                                            null,
                                            null))
                            .column(
                                    PhysicalColumn.of(
                                            "f3",
                                            BasicType.STRING_TYPE,
                                            20,
                                            false,
                                            null,
                                            "f3 comment",
                                            "varchar(20)",
                                            false,
                                            false,
                                            null,
                                            null,
                                            null))
                            .primaryKey(PrimaryKey.of("pk1", Arrays.asList("f1")))
                            .constraintKey(
                                    ConstraintKey.of(
                                            ConstraintKey.ConstraintType.UNIQUE_KEY,
                                            "uk1",
                                            Arrays.asList(
                                                    ConstraintKey.ConstraintKeyColumn.of(
                                                            "f2", ConstraintKey.ColumnSortType.ASC),
                                                    ConstraintKey.ConstraintKeyColumn.of(
                                                            "f3",
                                                            ConstraintKey.ColumnSortType.ASC))))
                            .build(),
                    Collections.emptyMap(),
                    Collections.singletonList("f2"),
                    null);
    @Test
    public void testColumnEqualsMerge() {
        // Query result has the same column set as DEFAULT_TABLE (f1/f2/f3) but in a
        // different order (f2, f3, f1) and without catalog-only metadata (comments,
        // source types); the merge is expected to keep catalog metadata while
        // reordering columns to match the query.
        CatalogTable tableOfQuery =
                CatalogTable.of(
                        TableIdentifier.of("default", null, null, "default"),
                        TableSchema.builder()
                                .column(
                                        PhysicalColumn.of(
                                                "f2",
                                                BasicType.STRING_TYPE,
                                                10,
                                                true,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .column(
                                        PhysicalColumn.of(
                                                "f3",
                                                BasicType.STRING_TYPE,
                                                20,
                                                false,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .column(
                                        PhysicalColumn.of(
                                                "f1",
                                                BasicType.LONG_TYPE,
                                                null,
                                                true,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .build(),
                        Collections.emptyMap(),
                        Collections.emptyList(),
                        null);
        CatalogTable mergeTable = JdbcCatalogUtils.mergeCatalogTable(DEFAULT_TABLE, tableOfQuery);
        // Identity, options, comment and catalog name come from the catalog-side table.
        Assertions.assertEquals(DEFAULT_TABLE.getTableId(), mergeTable.getTableId());
        Assertions.assertEquals(DEFAULT_TABLE.getOptions(), mergeTable.getOptions());
        Assertions.assertEquals(DEFAULT_TABLE.getComment(), mergeTable.getComment());
        Assertions.assertEquals(DEFAULT_TABLE.getCatalogName(), mergeTable.getCatalogName());
        // The schema as a whole differs because column order follows the query result.
        Assertions.assertNotEquals(DEFAULT_TABLE.getTableSchema(), mergeTable.getTableSchema());
        Assertions.assertEquals(
                DEFAULT_TABLE.getTableSchema().getPrimaryKey(),
                mergeTable.getTableSchema().getPrimaryKey());
        Assertions.assertEquals(
                DEFAULT_TABLE.getTableSchema().getConstraintKeys(),
                mergeTable.getTableSchema().getConstraintKeys());
        // Merged columns must be DEFAULT_TABLE's columns (full metadata) re-sorted
        // into the query's column order.
        Map<String, Column> columnMap =
                DEFAULT_TABLE.getTableSchema().getColumns().stream()
                        .collect(Collectors.toMap(e -> e.getName(), e -> e));
        List<Column> sortByQueryColumns =
                tableOfQuery.getTableSchema().getColumns().stream()
                        .map(e -> columnMap.get(e.getName()))
                        .collect(Collectors.toList());
        Assertions.assertEquals(sortByQueryColumns, mergeTable.getTableSchema().getColumns());
    }
    @Test
    public void testColumnIncludeMerge() {
        // Query projects only a subset of DEFAULT_TABLE's columns (f1 and f3).
        // The merge should keep just those columns, drop the partition key (f2 is
        // no longer selected) and drop constraint keys that reference f2.
        CatalogTable tableOfQuery =
                CatalogTable.of(
                        TableIdentifier.of("default", null, null, "default"),
                        TableSchema.builder()
                                .column(
                                        PhysicalColumn.of(
                                                "f1",
                                                BasicType.LONG_TYPE,
                                                null,
                                                true,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .column(
                                        PhysicalColumn.of(
                                                "f3",
                                                BasicType.STRING_TYPE,
                                                20,
                                                false,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .build(),
                        Collections.emptyMap(),
                        Collections.emptyList(),
                        null);
        CatalogTable mergeTable = JdbcCatalogUtils.mergeCatalogTable(DEFAULT_TABLE, tableOfQuery);
        Assertions.assertEquals(DEFAULT_TABLE.getTableId(), mergeTable.getTableId());
        // Primary key (on f1) survives because f1 is still selected.
        Assertions.assertEquals(
                DEFAULT_TABLE.getTableSchema().getPrimaryKey(),
                mergeTable.getTableSchema().getPrimaryKey());
        Assertions.assertEquals(
                DEFAULT_TABLE.getTableSchema().getColumns().stream()
                        .filter(c -> Arrays.asList("f1", "f3").contains(c.getName()))
                        .collect(Collectors.toList()),
                mergeTable.getTableSchema().getColumns());
        // f2 was not selected, so the partition key and the (f2, f3) unique key are gone.
        Assertions.assertTrue(mergeTable.getPartitionKeys().isEmpty());
        Assertions.assertTrue(mergeTable.getTableSchema().getConstraintKeys().isEmpty());
    }
    @Test
    public void testColumnNotIncludeMerge() {
        // Query produces a column (f4) that the catalog table does not know about.
        // In that case the merge is expected to fall back to the query-side schema
        // wholesale while keeping the catalog table's path, partition and key metadata.
        CatalogTable tableOfQuery =
                CatalogTable.of(
                        TableIdentifier.of("default", null, null, "default"),
                        TableSchema.builder()
                                .column(
                                        PhysicalColumn.of(
                                                "f1",
                                                BasicType.LONG_TYPE,
                                                null,
                                                true,
                                                null,
                                                "f1 comment",
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .column(
                                        PhysicalColumn.of(
                                                "f2",
                                                BasicType.STRING_TYPE,
                                                10,
                                                true,
                                                null,
                                                "f2 comment",
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .column(
                                        PhysicalColumn.of(
                                                "f3",
                                                BasicType.STRING_TYPE,
                                                20,
                                                false,
                                                null,
                                                "f3 comment",
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .column(
                                        PhysicalColumn.of(
                                                "f4",
                                                BasicType.STRING_TYPE,
                                                20,
                                                false,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .build(),
                        Collections.emptyMap(),
                        Collections.emptyList(),
                        null);
        CatalogTable mergeTable = JdbcCatalogUtils.mergeCatalogTable(DEFAULT_TABLE, tableOfQuery);
        // Table path, partition keys and key metadata still come from the catalog table.
        Assertions.assertEquals(
                DEFAULT_TABLE.getTableId().toTablePath(), mergeTable.getTableId().toTablePath());
        Assertions.assertEquals(DEFAULT_TABLE.getPartitionKeys(), mergeTable.getPartitionKeys());
        Assertions.assertEquals(
                DEFAULT_TABLE.getTableSchema().getPrimaryKey(),
                mergeTable.getTableSchema().getPrimaryKey());
        Assertions.assertEquals(
                DEFAULT_TABLE.getTableSchema().getConstraintKeys(),
                mergeTable.getTableSchema().getConstraintKeys());
        // Catalog name and columns come from the query-side table.
        Assertions.assertEquals(
                tableOfQuery.getTableId().getCatalogName(),
                mergeTable.getTableId().getCatalogName());
        Assertions.assertEquals(
                tableOfQuery.getTableSchema().getColumns(),
                mergeTable.getTableSchema().getColumns());
    }
    @Test
    public void testDecimalColumnMerge() {
        // Same column "f1" reported as DECIMAL(10,1) by the query and DECIMAL(10,2)
        // by the catalog path lookup.
        CatalogTable tableOfQuery =
                CatalogTable.of(
                        TableIdentifier.of("default", null, null, "default"),
                        TableSchema.builder()
                                .column(
                                        PhysicalColumn.of(
                                                "f1",
                                                new DecimalType(10, 1),
                                                null,
                                                true,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .build(),
                        Collections.emptyMap(),
                        Collections.emptyList(),
                        null);
        CatalogTable tableOfPath =
                CatalogTable.of(
                        TableIdentifier.of("default", null, null, "default"),
                        TableSchema.builder()
                                .column(
                                        PhysicalColumn.of(
                                                "f1",
                                                new DecimalType(10, 2),
                                                null,
                                                true,
                                                null,
                                                null,
                                                null,
                                                false,
                                                false,
                                                null,
                                                null,
                                                null))
                                .build(),
                        Collections.emptyMap(),
                        Collections.emptyList(),
                        null);
        CatalogTable mergeTable = JdbcCatalogUtils.mergeCatalogTable(tableOfPath, tableOfQuery);
        // When column type is decimal, the precision and scale should not affect the merge result:
        // the catalog-side (path) column definition wins.
        Assertions.assertEquals(
                tableOfPath.getTableSchema().getColumns().get(0),
                mergeTable.getTableSchema().getColumns().get(0));
    }
    @Test
    public void testCatalogGetTablesWithMysqlPattern() throws Exception {
        // MySQL-style layout: no schema level, table paths are "database.table".
        // A spied TestCatalog serves three databases with fixed table lists.
        TestCatalog testCatalog = spy(new TestCatalog());
        TableSchema tableSchema =
                TableSchema.builder()
                        .column(PhysicalColumn.of("id", BasicType.INT_TYPE, 0, true, null, null))
                        .build();
        List<String> allDatabases = new ArrayList<>(Arrays.asList("test", "prod", "dev"));
        Map<String, List<String>> databaseTables = new HashMap<>();
        databaseTables.put(
                "test", Arrays.asList("table1", "table2", "table3", "table123", "tableabc"));
        databaseTables.put("prod", Arrays.asList("prod_table1", "prod_table2", "prod_table3"));
        databaseTables.put("dev", Arrays.asList("dev_table1", "dev_table2"));
        // Pre-build one CatalogTable per (database, table) so getTable() can be stubbed
        // with a simple map lookup.
        Map<TablePath, CatalogTable> tableMap = new HashMap<>();
        for (String database : allDatabases) {
            for (String tableName : databaseTables.get(database)) {
                TablePath tablePath = TablePath.of(database, null, tableName);
                CatalogTable table =
                        CatalogTable.of(
                                TableIdentifier.of(database, null, null, tableName),
                                tableSchema,
                                Collections.emptyMap(),
                                Collections.emptyList(),
                                "Test " + tableName);
                tableMap.put(tablePath, table);
            }
        }
        // Stub the listing/lookup API used by Catalog.getTables(...).
        doAnswer(invocation -> new ArrayList<>(allDatabases)).when(testCatalog).listDatabases();
        for (String database : allDatabases) {
            doReturn(true).when(testCatalog).databaseExists(eq(database));
        }
        for (String database : allDatabases) {
            doReturn(new ArrayList<>(databaseTables.get(database)))
                    .when(testCatalog)
                    .listTables(eq(database));
        }
        for (String database : allDatabases) {
            List<TablePath> paths =
                    databaseTables.get(database).stream()
                            .map(tableName -> TablePath.of(database, null, tableName))
                            .collect(Collectors.toList());
            doReturn(paths).when(testCatalog).listTablePaths(eq(database));
        }
        doReturn(true).when(testCatalog).tableExists(any(TablePath.class));
        doAnswer(
                        invocation -> {
                            TablePath path = invocation.getArgument(0);
                            CatalogTable table = tableMap.get(path);
                            if (table == null) {
                                throw new TableNotExistException("test", path);
                            }
                            return table;
                        })
                .when(testCatalog)
                .getTable(any(TablePath.class));
        // Exercise a few representative database/table regex combinations.
        testMysqlRegexPattern(
                testCatalog,
                "test",
                "test.table\\d+",
                Arrays.asList("table1", "table2", "table3", "table123"));
        testMysqlRegexPattern(
                testCatalog,
                ".*",
                ".*table1",
                Arrays.asList("table1", "prod_table1", "dev_table1"));
        testMysqlRegexPattern(
                testCatalog,
                "prod",
                "prod.prod_table[1-2]",
                Arrays.asList("prod_table1", "prod_table2"));
        // A pattern matching nothing must yield an empty result, not an error.
        testMysqlRegexPattern(testCatalog, ".*", "nonexistent.*", Collections.emptyList());
    }
private void testMysqlRegexPattern(
Catalog catalog,
String databasePattern,
String tablePattern,
List<String> expectedTablePaths) {
Map<String, Object> configMap = new HashMap<>();
configMap.put(ConnectorCommonOptions.DATABASE_PATTERN.key(), databasePattern);
configMap.put(ConnectorCommonOptions.TABLE_PATTERN.key(), tablePattern);
ReadonlyConfig config = ReadonlyConfig.fromMap(configMap);
List<CatalogTable> tables = catalog.getTables(config);
List<String> actualTablePaths =
tables.stream()
.map(t -> t.getTableId().toTablePath().toString())
.collect(Collectors.toList());
Set<String> actualTablePathSet = new HashSet<>(actualTablePaths);
Set<String> expectedTablePathSet = new HashSet<>(expectedTablePaths);
Assertions.assertEquals(
expectedTablePathSet.size(),
actualTablePathSet.size(),
"Expected "
+ expectedTablePathSet.size()
+ " tables for pattern: "
+ databasePattern
+ "."
+ tablePattern);
if (!expectedTablePaths.isEmpty()) {
for (String expectedTablePath : expectedTablePaths) {
Assertions.assertTrue(
actualTablePathSet.contains(expectedTablePath),
"Expected table path "
+ expectedTablePath
+ " not found for pattern: "
+ databasePattern
+ "."
+ tablePattern);
}
} else {
Assertions.assertTrue(
actualTablePathSet.isEmpty(),
"Expected empty result for pattern: " + databasePattern + "." + tablePattern);
}
}
    @Test
    public void testCatalogGetTablesWithPostgresPattern() throws Exception {
        // Postgres-style layout: three-level paths "database.schema.table".
        // A spied TestCatalog serves three databases, each with several schemas.
        String catalogName = "postgres_catalog";
        TestCatalog postgresCatalog = spy(new TestCatalog());
        doReturn(catalogName).when(postgresCatalog).name();
        TableSchema tableSchema =
                TableSchema.builder()
                        .column(PhysicalColumn.of("id", BasicType.INT_TYPE, 0, true, null, null))
                        .build();
        List<String> allDatabases = new ArrayList<>(Arrays.asList("postgres", "test_db", "dev_db"));
        Map<String, List<String>> databaseSchemas = new HashMap<>();
        databaseSchemas.put("postgres", Arrays.asList("public", "schema1", "schema2"));
        databaseSchemas.put("test_db", Arrays.asList("public", "test_schema"));
        databaseSchemas.put("dev_db", Arrays.asList("public", "dev_schema"));
        // database -> schema -> tables
        Map<String, Map<String, List<String>>> schemasTables = new HashMap<>();
        Map<String, List<String>> postgresSchemas = new HashMap<>();
        postgresSchemas.put("public", Arrays.asList("users", "orders", "products", "customers"));
        postgresSchemas.put("schema1", Arrays.asList("table1", "table2", "table3"));
        postgresSchemas.put("schema2", Arrays.asList("log_2021", "log_2022", "log_2023"));
        schemasTables.put("postgres", postgresSchemas);
        Map<String, List<String>> testDbSchemas = new HashMap<>();
        testDbSchemas.put("public", Arrays.asList("test_table1", "test_table2"));
        testDbSchemas.put("test_schema", Arrays.asList("data_table1", "data_table2"));
        schemasTables.put("test_db", testDbSchemas);
        Map<String, List<String>> devDbSchemas = new HashMap<>();
        devDbSchemas.put("public", Arrays.asList("dev_table1", "dev_table2"));
        devDbSchemas.put("dev_schema", Arrays.asList("temp_table1", "temp_table2"));
        schemasTables.put("dev_db", devDbSchemas);
        // Pre-build one CatalogTable per (database, schema, table) for getTable() stubbing.
        Map<TablePath, CatalogTable> tableMap = new HashMap<>();
        for (String database : allDatabases) {
            for (String schema : databaseSchemas.get(database)) {
                for (String tableName : schemasTables.get(database).get(schema)) {
                    TablePath tablePath = TablePath.of(database, schema, tableName);
                    CatalogTable table =
                            CatalogTable.of(
                                    TableIdentifier.of(catalogName, database, schema, tableName),
                                    tableSchema,
                                    Collections.emptyMap(),
                                    Collections.emptyList(),
                                    "Test " + tableName);
                    tableMap.put(tablePath, table);
                }
            }
        }
        // Stub the listing/lookup API used by Catalog.getTables(...). Note that
        // listTables is keyed by "database.schema" in the Postgres layout.
        doAnswer(invocation -> new ArrayList<>(allDatabases)).when(postgresCatalog).listDatabases();
        for (String database : allDatabases) {
            doReturn(true).when(postgresCatalog).databaseExists(eq(database));
        }
        for (String database : allDatabases) {
            for (String schema : databaseSchemas.get(database)) {
                List<String> tables = schemasTables.get(database).get(schema);
                doReturn(new ArrayList<>(tables))
                        .when(postgresCatalog)
                        .listTables(eq(database + "." + schema));
            }
        }
        for (String database : allDatabases) {
            List<TablePath> paths = new ArrayList<>();
            for (String schema : databaseSchemas.get(database)) {
                for (String tableName : schemasTables.get(database).get(schema)) {
                    paths.add(TablePath.of(database, schema, tableName));
                }
            }
            doReturn(paths).when(postgresCatalog).listTablePaths(eq(database));
        }
        doReturn(true).when(postgresCatalog).tableExists(any(TablePath.class));
        doAnswer(
                        invocation -> {
                            TablePath path = invocation.getArgument(0);
                            CatalogTable table = tableMap.get(path);
                            if (table == null) {
                                throw new TableNotExistException("test", path);
                            }
                            return table;
                        })
                .when(postgresCatalog)
                .getTable(any(TablePath.class));
        // Exercise representative three-part regex combinations.
        testPostgresRegexPattern(
                postgresCatalog,
                "postgres",
                "postgres\\.public\\..*",
                Arrays.asList(
                        "postgres.public.users",
                        "postgres.public.orders",
                        "postgres.public.products",
                        "postgres.public.customers"));
        testPostgresRegexPattern(
                postgresCatalog,
                ".*",
                ".*\\.public\\..*table.*",
                Arrays.asList(
                        "test_db.public.test_table1",
                        "test_db.public.test_table2",
                        "dev_db.public.dev_table1",
                        "dev_db.public.dev_table2"));
        testPostgresRegexPattern(
                postgresCatalog,
                ".*",
                ".*\\..*\\.log_\\d{4}",
                Arrays.asList(
                        "postgres.schema2.log_2021",
                        "postgres.schema2.log_2022",
                        "postgres.schema2.log_2023"));
        testPostgresRegexPattern(
                postgresCatalog,
                "test_db",
                "test_db\\..*\\..*",
                Arrays.asList(
                        "test_db.public.test_table1",
                        "test_db.public.test_table2",
                        "test_db.test_schema.data_table1",
                        "test_db.test_schema.data_table2"));
        // A pattern matching nothing must yield an empty result, not an error.
        testPostgresRegexPattern(
                postgresCatalog, ".*", ".*\\..*\\.nonexistent.*", Collections.emptyList());
    }
private void testPostgresRegexPattern(
Catalog catalog,
String databasePattern,
String tablePattern,
List<String> expectedTablePaths) {
Map<String, Object> configMap = new HashMap<>();
configMap.put(ConnectorCommonOptions.DATABASE_PATTERN.key(), databasePattern);
configMap.put(ConnectorCommonOptions.TABLE_PATTERN.key(), tablePattern);
ReadonlyConfig config = ReadonlyConfig.fromMap(configMap);
List<CatalogTable> tables = catalog.getTables(config);
List<String> actualTablePaths =
tables.stream()
.map(
t -> {
TableIdentifier id = t.getTableId();
return id.getDatabaseName()
+ "."
+ id.getSchemaName()
+ "."
+ id.getTableName();
})
.collect(Collectors.toList());
Set<String> actualTablePathSet = new HashSet<>(actualTablePaths);
Set<String> expectedTablePathSet = new HashSet<>(expectedTablePaths);
Assertions.assertEquals(
expectedTablePathSet.size(),
actualTablePathSet.size(),
"Expected "
+ expectedTablePathSet.size()
+ " tables for pattern: "
+ databasePattern
+ "."
+ tablePattern);
if (!expectedTablePaths.isEmpty()) {
for (String expectedTablePath : expectedTablePaths) {
Assertions.assertTrue(
actualTablePathSet.contains(expectedTablePath),
"Expected table path "
+ expectedTablePath
+ " not found for pattern: "
+ databasePattern
+ "."
+ tablePattern);
}
} else {
Assertions.assertTrue(
actualTablePathSet.isEmpty(),
"Expected empty result for pattern: " + databasePattern + "." + tablePattern);
}
}
    /**
     * Minimal {@link Catalog} implementation used as a Mockito spy target. The defaults
     * here are inert (empty lists, {@code false}, throwing {@code getTable}); each test
     * overrides exactly the methods it needs via {@code doReturn}/{@code doAnswer}.
     */
    private static class TestCatalog implements Catalog {
        @Override
        public void open() throws CatalogException {}

        @Override
        public void close() throws CatalogException {}

        @Override
        public String name() {
            return "TestCatalog";
        }

        @Override
        public String getDefaultDatabase() throws CatalogException {
            return "test";
        }

        @Override
        public boolean databaseExists(String databaseName) throws CatalogException {
            return false;
        }

        @Override
        public List<String> listDatabases() throws CatalogException {
            return Collections.emptyList();
        }

        @Override
        public List<String> listTables(String databaseName)
                throws CatalogException, DatabaseNotExistException {
            return Collections.emptyList();
        }

        @Override
        public boolean tableExists(TablePath tablePath) throws CatalogException {
            return false;
        }

        @Override
        public CatalogTable getTable(TablePath tablePath)
                throws CatalogException, TableNotExistException {
            // Default: behave as if no table exists; tests stub this with a map lookup.
            throw new TableNotExistException("test", tablePath);
        }

        // DDL operations are no-ops; the pattern tests never create or drop anything.
        @Override
        public void createTable(TablePath tablePath, CatalogTable table, boolean ignoreIfExists)
                throws TableAlreadyExistException, DatabaseNotExistException, CatalogException {}

        @Override
        public void dropTable(TablePath tablePath, boolean ignoreIfNotExists)
                throws TableNotExistException, CatalogException {}

        @Override
        public void createDatabase(TablePath tablePath, boolean ignoreIfExists)
                throws DatabaseAlreadyExistException, CatalogException {}

        @Override
        public void dropDatabase(TablePath tablePath, boolean ignoreIfNotExists)
                throws DatabaseNotExistException, CatalogException {}
    }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.bigquery.model;
/**
* Model definition for JobConfigurationQuery.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the BigQuery API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class JobConfigurationQuery extends com.google.api.client.json.GenericJson {
/**
* [Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily
* large result tables at a slight cost in performance. Requires destinationTable to be set. For
* standard SQL queries, this flag is ignored and large results are always allowed. However, you
* must still set destinationTable when result size exceeds the allowed maximum response size.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean allowLargeResults;
/**
* [Beta] Clustering specification for the destination table. Must be specified with time-based
* partitioning, data in the table will be first partitioned and subsequently clustered.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Clustering clustering;
/**
* Connection properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<ConnectionProperty> connectionProperties;
static {
// hack to force ProGuard to consider ConnectionProperty used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(ConnectionProperty.class);
}
/**
* [Optional] Specifies whether the job is allowed to create new tables. The following values are
* supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in
* the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions
* occur as one atomic update upon job completion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String createDisposition;
/**
* If true, creates a new session, where session id will be a server generated random id. If
* false, runs query with an existing session_id passed in ConnectionProperty, otherwise runs
* query in non-session mode.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean createSession;
/**
* [Optional] Specifies the default dataset to use for unqualified table names in the query. Note
* that this does not alter behavior of unqualified dataset names.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DatasetReference defaultDataset;
/**
* Custom encryption configuration (e.g., Cloud KMS keys).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private EncryptionConfiguration destinationEncryptionConfiguration;
/**
* [Optional] Describes the table where the query results should be stored. If not present, a new
* table will be created to store the results. This property must be set for large results that
* exceed the maximum response size.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TableReference destinationTable;
/**
* [Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields
* in the query results. allowLargeResults must be true if this is set to false. For standard SQL
* queries, this flag is ignored and results are never flattened.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean flattenResults;
/**
* [Optional] Limits the billing tier for this job. Queries that have resource usage beyond this
* tier will fail (without incurring a charge). If unspecified, this will be set to your project
* default.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer maximumBillingTier;
/**
* [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond
* this limit will fail (without incurring a charge). If unspecified, this will be set to your
* project default.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long maximumBytesBilled;
/**
* Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use
* named (@myparam) query parameters in this query.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String parameterMode;
/**
* [Deprecated] This property is deprecated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean preserveNulls;
/**
* [Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH.
* The default value is INTERACTIVE.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String priority;
/**
* [Required] SQL query text to execute. The useLegacySql field can be used to indicate whether
* the query uses legacy SQL or standard SQL.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String query;
/**
* Query parameters for standard SQL queries.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<QueryParameter> queryParameters;
/**
* [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning
* and rangePartitioning should be specified.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RangePartitioning rangePartitioning;
/**
* Allows the schema of the destination table to be updated as a side effect of the query job.
* Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when
* writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
* specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
* schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a
* nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the
* original schema to nullable.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> schemaUpdateOptions;
/**
* [Optional] If querying an external data source outside of BigQuery, describes the data format,
* location and other properties of the data source. By defining these properties, the data source
* can then be queried as if it were a standard BigQuery table.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, ExternalDataConfiguration> tableDefinitions;
static {
// hack to force ProGuard to consider ExternalDataConfiguration used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(ExternalDataConfiguration.class);
}
/**
* Time-based partitioning specification for the destination table. Only one of timePartitioning
* and rangePartitioning should be specified.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TimePartitioning timePartitioning;
/**
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is
* true. If set to false, the query will use BigQuery's standard SQL:
* https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value
* of flattenResults is ignored; query will be run as if flattenResults is false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean useLegacySql;
/**
* [Optional] Whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. Moreover, the query cache
* is only available when a query does not have a destination table specified. The default value
* is true.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean useQueryCache;
/**
* Describes user-defined function resources used in the query.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<UserDefinedFunctionResource> userDefinedFunctionResources;
/**
* [Optional] Specifies the action that occurs if the destination table already exists. The
* following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery
* overwrites the table data and uses the schema from the query result. WRITE_APPEND: If the table
* already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already
* exists and contains data, a 'duplicate' error is returned in the job result. The default value
* is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job
* successfully. Creation, truncation and append actions occur as one atomic update upon job
* completion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String writeDisposition;
/**
* [Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily
* large result tables at a slight cost in performance. Requires destinationTable to be set. For
* standard SQL queries, this flag is ignored and large results are always allowed. However, you
* must still set destinationTable when result size exceeds the allowed maximum response size.
* @return value or {@code null} for none
*/
public java.lang.Boolean getAllowLargeResults() {
return allowLargeResults;
}
/**
* [Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily
* large result tables at a slight cost in performance. Requires destinationTable to be set. For
* standard SQL queries, this flag is ignored and large results are always allowed. However, you
* must still set destinationTable when result size exceeds the allowed maximum response size.
* @param allowLargeResults allowLargeResults or {@code null} for none
*/
public JobConfigurationQuery setAllowLargeResults(java.lang.Boolean allowLargeResults) {
this.allowLargeResults = allowLargeResults;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
*[ Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large
[ result tables at a slight cost in performance. Requires destinationTable to be set. For standard
[ SQL queries, this flag is ignored and large results are always allowed. However, you must still
[ set destinationTable when result size exceeds the allowed maximum response size.
* </p>
*/
public boolean isAllowLargeResults() {
if (allowLargeResults == null || allowLargeResults == com.google.api.client.util.Data.NULL_BOOLEAN) {
return false;
}
return allowLargeResults;
}
/**
* [Beta] Clustering specification for the destination table. Must be specified with time-based
* partitioning, data in the table will be first partitioned and subsequently clustered.
* @return value or {@code null} for none
*/
public Clustering getClustering() {
return clustering;
}
/**
* [Beta] Clustering specification for the destination table. Must be specified with time-based
* partitioning, data in the table will be first partitioned and subsequently clustered.
* @param clustering clustering or {@code null} for none
*/
public JobConfigurationQuery setClustering(Clustering clustering) {
this.clustering = clustering;
return this;
}
/**
* Connection properties.
* @return value or {@code null} for none
*/
public java.util.List<ConnectionProperty> getConnectionProperties() {
return connectionProperties;
}
/**
* Connection properties.
* @param connectionProperties connectionProperties or {@code null} for none
*/
public JobConfigurationQuery setConnectionProperties(java.util.List<ConnectionProperty> connectionProperties) {
this.connectionProperties = connectionProperties;
return this;
}
/**
* [Optional] Specifies whether the job is allowed to create new tables. The following values are
* supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in
* the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions
* occur as one atomic update upon job completion.
* @return value or {@code null} for none
*/
public java.lang.String getCreateDisposition() {
return createDisposition;
}
/**
* [Optional] Specifies whether the job is allowed to create new tables. The following values are
* supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in
* the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions
* occur as one atomic update upon job completion.
* @param createDisposition createDisposition or {@code null} for none
*/
public JobConfigurationQuery setCreateDisposition(java.lang.String createDisposition) {
this.createDisposition = createDisposition;
return this;
}
/**
* If true, creates a new session, where session id will be a server generated random id. If
* false, runs query with an existing session_id passed in ConnectionProperty, otherwise runs
* query in non-session mode.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCreateSession() {
return createSession;
}
/**
* If true, creates a new session, where session id will be a server generated random id. If
* false, runs query with an existing session_id passed in ConnectionProperty, otherwise runs
* query in non-session mode.
* @param createSession createSession or {@code null} for none
*/
public JobConfigurationQuery setCreateSession(java.lang.Boolean createSession) {
this.createSession = createSession;
return this;
}
/**
* [Optional] Specifies the default dataset to use for unqualified table names in the query. Note
* that this does not alter behavior of unqualified dataset names.
* @return value or {@code null} for none
*/
public DatasetReference getDefaultDataset() {
return defaultDataset;
}
/**
* [Optional] Specifies the default dataset to use for unqualified table names in the query. Note
* that this does not alter behavior of unqualified dataset names.
* @param defaultDataset defaultDataset or {@code null} for none
*/
public JobConfigurationQuery setDefaultDataset(DatasetReference defaultDataset) {
this.defaultDataset = defaultDataset;
return this;
}
/**
* Custom encryption configuration (e.g., Cloud KMS keys).
* @return value or {@code null} for none
*/
public EncryptionConfiguration getDestinationEncryptionConfiguration() {
return destinationEncryptionConfiguration;
}
/**
* Custom encryption configuration (e.g., Cloud KMS keys).
* @param destinationEncryptionConfiguration destinationEncryptionConfiguration or {@code null} for none
*/
public JobConfigurationQuery setDestinationEncryptionConfiguration(EncryptionConfiguration destinationEncryptionConfiguration) {
this.destinationEncryptionConfiguration = destinationEncryptionConfiguration;
return this;
}
/**
* [Optional] Describes the table where the query results should be stored. If not present, a new
* table will be created to store the results. This property must be set for large results that
* exceed the maximum response size.
* @return value or {@code null} for none
*/
public TableReference getDestinationTable() {
return destinationTable;
}
/**
* [Optional] Describes the table where the query results should be stored. If not present, a new
* table will be created to store the results. This property must be set for large results that
* exceed the maximum response size.
* @param destinationTable destinationTable or {@code null} for none
*/
public JobConfigurationQuery setDestinationTable(TableReference destinationTable) {
this.destinationTable = destinationTable;
return this;
}
/**
* [Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields
* in the query results. allowLargeResults must be true if this is set to false. For standard SQL
* queries, this flag is ignored and results are never flattened.
* @return value or {@code null} for none
*/
public java.lang.Boolean getFlattenResults() {
return flattenResults;
}
/**
* [Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields
* in the query results. allowLargeResults must be true if this is set to false. For standard SQL
* queries, this flag is ignored and results are never flattened.
* @param flattenResults flattenResults or {@code null} for none
*/
public JobConfigurationQuery setFlattenResults(java.lang.Boolean flattenResults) {
this.flattenResults = flattenResults;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
*[ Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields in
[ the query results. allowLargeResults must be true if this is set to false. For standard SQL
[ queries, this flag is ignored and results are never flattened.
* </p>
*/
public boolean isFlattenResults() {
if (flattenResults == null || flattenResults == com.google.api.client.util.Data.NULL_BOOLEAN) {
return true;
}
return flattenResults;
}
/**
* [Optional] Limits the billing tier for this job. Queries that have resource usage beyond this
* tier will fail (without incurring a charge). If unspecified, this will be set to your project
* default.
* @return value or {@code null} for none
*/
public java.lang.Integer getMaximumBillingTier() {
return maximumBillingTier;
}
/**
* [Optional] Limits the billing tier for this job. Queries that have resource usage beyond this
* tier will fail (without incurring a charge). If unspecified, this will be set to your project
* default.
* @param maximumBillingTier maximumBillingTier or {@code null} for none
*/
public JobConfigurationQuery setMaximumBillingTier(java.lang.Integer maximumBillingTier) {
this.maximumBillingTier = maximumBillingTier;
return this;
}
/**
* [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond
* this limit will fail (without incurring a charge). If unspecified, this will be set to your
* project default.
* @return value or {@code null} for none
*/
public java.lang.Long getMaximumBytesBilled() {
return maximumBytesBilled;
}
/**
* [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond
* this limit will fail (without incurring a charge). If unspecified, this will be set to your
* project default.
* @param maximumBytesBilled maximumBytesBilled or {@code null} for none
*/
public JobConfigurationQuery setMaximumBytesBilled(java.lang.Long maximumBytesBilled) {
this.maximumBytesBilled = maximumBytesBilled;
return this;
}
/**
* Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use
* named (@myparam) query parameters in this query.
* @return value or {@code null} for none
*/
public java.lang.String getParameterMode() {
return parameterMode;
}
/**
* Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use
* named (@myparam) query parameters in this query.
* @param parameterMode parameterMode or {@code null} for none
*/
public JobConfigurationQuery setParameterMode(java.lang.String parameterMode) {
this.parameterMode = parameterMode;
return this;
}
/**
* [Deprecated] This property is deprecated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getPreserveNulls() {
return preserveNulls;
}
/**
* [Deprecated] This property is deprecated.
* @param preserveNulls preserveNulls or {@code null} for none
*/
public JobConfigurationQuery setPreserveNulls(java.lang.Boolean preserveNulls) {
this.preserveNulls = preserveNulls;
return this;
}
/**
* [Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH.
* The default value is INTERACTIVE.
* @return value or {@code null} for none
*/
public java.lang.String getPriority() {
return priority;
}
/**
* [Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH.
* The default value is INTERACTIVE.
* @param priority priority or {@code null} for none
*/
public JobConfigurationQuery setPriority(java.lang.String priority) {
this.priority = priority;
return this;
}
/**
* [Required] SQL query text to execute. The useLegacySql field can be used to indicate whether
* the query uses legacy SQL or standard SQL.
* @return value or {@code null} for none
*/
public java.lang.String getQuery() {
return query;
}
/**
* [Required] SQL query text to execute. The useLegacySql field can be used to indicate whether
* the query uses legacy SQL or standard SQL.
* @param query query or {@code null} for none
*/
public JobConfigurationQuery setQuery(java.lang.String query) {
this.query = query;
return this;
}
/**
* Query parameters for standard SQL queries.
* @return value or {@code null} for none
*/
public java.util.List<QueryParameter> getQueryParameters() {
return queryParameters;
}
/**
* Query parameters for standard SQL queries.
* @param queryParameters queryParameters or {@code null} for none
*/
public JobConfigurationQuery setQueryParameters(java.util.List<QueryParameter> queryParameters) {
this.queryParameters = queryParameters;
return this;
}
/**
* [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @return value or {@code null} for none
*/
public RangePartitioning getRangePartitioning() {
return rangePartitioning;
}
/**
* [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @param rangePartitioning rangePartitioning or {@code null} for none
*/
public JobConfigurationQuery setRangePartitioning(RangePartitioning rangePartitioning) {
this.rangePartitioning = rangePartitioning;
return this;
}
/**
* Allows the schema of the destination table to be updated as a side effect of the query job.
* Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when
* writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
* specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
* schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a
* nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the
* original schema to nullable.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getSchemaUpdateOptions() {
return schemaUpdateOptions;
}
/**
* Allows the schema of the destination table to be updated as a side effect of the query job.
* Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when
* writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
* specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
* schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a
* nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the
* original schema to nullable.
* @param schemaUpdateOptions schemaUpdateOptions or {@code null} for none
*/
public JobConfigurationQuery setSchemaUpdateOptions(java.util.List<java.lang.String> schemaUpdateOptions) {
this.schemaUpdateOptions = schemaUpdateOptions;
return this;
}
/**
* [Optional] If querying an external data source outside of BigQuery, describes the data format,
* location and other properties of the data source. By defining these properties, the data source
* can then be queried as if it were a standard BigQuery table.
* @return value or {@code null} for none
*/
public java.util.Map<String, ExternalDataConfiguration> getTableDefinitions() {
return tableDefinitions;
}
/**
* [Optional] If querying an external data source outside of BigQuery, describes the data format,
* location and other properties of the data source. By defining these properties, the data source
* can then be queried as if it were a standard BigQuery table.
* @param tableDefinitions tableDefinitions or {@code null} for none
*/
public JobConfigurationQuery setTableDefinitions(java.util.Map<String, ExternalDataConfiguration> tableDefinitions) {
this.tableDefinitions = tableDefinitions;
return this;
}
/**
* Time-based partitioning specification for the destination table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @return value or {@code null} for none
*/
public TimePartitioning getTimePartitioning() {
return timePartitioning;
}
/**
* Time-based partitioning specification for the destination table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @param timePartitioning timePartitioning or {@code null} for none
*/
public JobConfigurationQuery setTimePartitioning(TimePartitioning timePartitioning) {
this.timePartitioning = timePartitioning;
return this;
}
/**
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is
* true. If set to false, the query will use BigQuery's standard SQL:
* https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value
* of flattenResults is ignored; query will be run as if flattenResults is false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getUseLegacySql() {
return useLegacySql;
}
/**
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is
* true. If set to false, the query will use BigQuery's standard SQL:
* https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value
* of flattenResults is ignored; query will be run as if flattenResults is false.
* @param useLegacySql useLegacySql or {@code null} for none
*/
public JobConfigurationQuery setUseLegacySql(java.lang.Boolean useLegacySql) {
this.useLegacySql = useLegacySql;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true.
If set to false, the query will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-
reference/ When useLegacySql is set to false, the value of flattenResults is ignored; query will be
run as if flattenResults is false.
* </p>
*/
public boolean isUseLegacySql() {
if (useLegacySql == null || useLegacySql == com.google.api.client.util.Data.NULL_BOOLEAN) {
return true;
}
return useLegacySql;
}
/**
* [Optional] Whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. Moreover, the query cache
* is only available when a query does not have a destination table specified. The default value
* is true.
* @return value or {@code null} for none
*/
public java.lang.Boolean getUseQueryCache() {
return useQueryCache;
}
/**
* [Optional] Whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. Moreover, the query cache
* is only available when a query does not have a destination table specified. The default value
* is true.
* @param useQueryCache useQueryCache or {@code null} for none
*/
public JobConfigurationQuery setUseQueryCache(java.lang.Boolean useQueryCache) {
this.useQueryCache = useQueryCache;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
*[ Optional] Whether to look for the result in the query cache. The query cache is a best-effort
[ cache that will be flushed whenever tables in the query are modified. Moreover, the query cache is
[ only available when a query does not have a destination table specified. The default value is
[ true.
* </p>
*/
public boolean isUseQueryCache() {
if (useQueryCache == null || useQueryCache == com.google.api.client.util.Data.NULL_BOOLEAN) {
return true;
}
return useQueryCache;
}
/**
* Describes user-defined function resources used in the query.
* @return value or {@code null} for none
*/
public java.util.List<UserDefinedFunctionResource> getUserDefinedFunctionResources() {
return userDefinedFunctionResources;
}
/**
* Describes user-defined function resources used in the query.
* @param userDefinedFunctionResources userDefinedFunctionResources or {@code null} for none
*/
public JobConfigurationQuery setUserDefinedFunctionResources(java.util.List<UserDefinedFunctionResource> userDefinedFunctionResources) {
this.userDefinedFunctionResources = userDefinedFunctionResources;
return this;
}
/**
 * [Optional] Specifies the action that occurs if the destination table already exists. The
 * following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery
 * overwrites the table data and uses the schema from the query result. WRITE_APPEND: If the table
 * already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already
 * exists and contains data, a 'duplicate' error is returned in the job result. The default value
 * is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job
 * successfully. Creation, truncation and append actions occur as one atomic update upon job
 * completion.
 *
 * @return value or {@code null} for none
 */
public java.lang.String getWriteDisposition() {
  return writeDisposition;
}
/**
 * [Optional] Specifies the action that occurs if the destination table already exists. The
 * following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery
 * overwrites the table data and uses the schema from the query result. WRITE_APPEND: If the table
 * already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already
 * exists and contains data, a 'duplicate' error is returned in the job result. The default value
 * is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job
 * successfully. Creation, truncation and append actions occur as one atomic update upon job
 * completion.
 *
 * @param writeDisposition writeDisposition or {@code null} for none
 * @return this object, for call chaining
 */
public JobConfigurationQuery setWriteDisposition(java.lang.String writeDisposition) {
  this.writeDisposition = writeDisposition;
  return this;
}
/** Sets a field by name, narrowing the superclass return type for fluent chaining. */
@Override
public JobConfigurationQuery set(String fieldName, Object value) {
  return (JobConfigurationQuery) super.set(fieldName, value);
}
/** Returns a copy of this configuration, narrowed to {@link JobConfigurationQuery}. */
@Override
public JobConfigurationQuery clone() {
  return (JobConfigurationQuery) super.clone();
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1;
/**
*
*
* <pre>
* ListUsableSubnetworksRequest requests the list of usable subnetworks
* available to a user for creating clusters.
* </pre>
*
* Protobuf type {@code google.container.v1.ListUsableSubnetworksRequest}
*/
public final class ListUsableSubnetworksRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.container.v1.ListUsableSubnetworksRequest)
    ListUsableSubnetworksRequestOrBuilder {
  // NOTE(review): protoc-generated message class — regeneration will overwrite hand edits.
  private static final long serialVersionUID = 0L;

  // Use ListUsableSubnetworksRequest.newBuilder() to construct.
  private ListUsableSubnetworksRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ListUsableSubnetworksRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListUsableSubnetworksRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_ListUsableSubnetworksRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_ListUsableSubnetworksRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1.ListUsableSubnetworksRequest.class,
            com.google.container.v1.ListUsableSubnetworksRequest.Builder.class);
  }

  // --- field: string parent = 1 ---
  public static final int PARENT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * The parent project where subnetworks are usable.
   * Specified in the format `projects/&#42;`.
   * </pre>
   *
   * <code>string parent = 1;</code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The parent project where subnetworks are usable.
   * Specified in the format `projects/&#42;`.
   * </pre>
   *
   * <code>string parent = 1;</code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // --- field: string filter = 2 ---
  public static final int FILTER_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";

  /**
   *
   *
   * <pre>
   * Filtering currently only supports equality on the networkProjectId and must
   * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
   * is the project which owns the listed subnetworks. This defaults to the
   * parent project ID.
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Filtering currently only supports equality on the networkProjectId and must
   * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
   * is the project which owns the listed subnetworks. This defaults to the
   * parent project ID.
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // --- field: int32 page_size = 3 ---
  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * The max number of results per page that should be returned. If the number
   * of available results is larger than `page_size`, a `next_page_token` is
   * returned which can be used to get the next page of results in subsequent
   * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
   * </pre>
   *
   * <code>int32 page_size = 3;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  // --- field: string page_token = 4 ---
  public static final int PAGE_TOKEN_FIELD_NUMBER = 4;

  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Specifies a page token to use. Set this to the nextPageToken returned by
   * previous list requests to get the next page of results.
   * </pre>
   *
   * <code>string page_token = 4;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Specifies a page token to use. Set this to the nextPageToken returned by
   * previous list requests to get the next page of results.
   * </pre>
   *
   * <code>string page_token = 4;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // --- wire serialization (proto3 semantics: fields at their default value are omitted) ---
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // --- value equality over all four fields plus unknown fields; hashCode is memoized ---
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1.ListUsableSubnetworksRequest)) {
      return super.equals(obj);
    }
    com.google.container.v1.ListUsableSubnetworksRequest other =
        (com.google.container.v1.ListUsableSubnetworksRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- static parsing entry points (all delegate to PARSER) ---
  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.container.v1.ListUsableSubnetworksRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * ListUsableSubnetworksRequest requests the list of usable subnetworks
   * available to a user for creating clusters.
   * </pre>
   *
   * Protobuf type {@code google.container.v1.ListUsableSubnetworksRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1.ListUsableSubnetworksRequest)
      com.google.container.v1.ListUsableSubnetworksRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_ListUsableSubnetworksRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_ListUsableSubnetworksRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1.ListUsableSubnetworksRequest.class,
              com.google.container.v1.ListUsableSubnetworksRequest.Builder.class);
    }

    // Construct using com.google.container.v1.ListUsableSubnetworksRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_ListUsableSubnetworksRequest_descriptor;
    }

    @java.lang.Override
    public com.google.container.v1.ListUsableSubnetworksRequest getDefaultInstanceForType() {
      return com.google.container.v1.ListUsableSubnetworksRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.container.v1.ListUsableSubnetworksRequest build() {
      com.google.container.v1.ListUsableSubnetworksRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.container.v1.ListUsableSubnetworksRequest buildPartial() {
      com.google.container.v1.ListUsableSubnetworksRequest result =
          new com.google.container.v1.ListUsableSubnetworksRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set in bitField0_.
    private void buildPartial0(com.google.container.v1.ListUsableSubnetworksRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1.ListUsableSubnetworksRequest) {
        return mergeFrom((com.google.container.v1.ListUsableSubnetworksRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merge semantics: non-default fields of `other` overwrite this builder's values.
    public Builder mergeFrom(com.google.container.v1.ListUsableSubnetworksRequest other) {
      if (other == com.google.container.v1.ListUsableSubnetworksRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse: dispatch on wire tags; unrecognized fields go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * The parent project where subnetworks are usable.
     * Specified in the format `projects/&#42;`.
     * </pre>
     *
     * <code>string parent = 1;</code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The parent project where subnetworks are usable.
     * Specified in the format `projects/&#42;`.
     * </pre>
     *
     * <code>string parent = 1;</code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The parent project where subnetworks are usable.
     * Specified in the format `projects/&#42;`.
     * </pre>
     *
     * <code>string parent = 1;</code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The parent project where subnetworks are usable.
     * Specified in the format `projects/&#42;`.
     * </pre>
     *
     * <code>string parent = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The parent project where subnetworks are usable.
     * Specified in the format `projects/&#42;`.
     * </pre>
     *
     * <code>string parent = 1;</code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * The max number of results per page that should be returned. If the number
     * of available results is larger than `page_size`, a `next_page_token` is
     * returned which can be used to get the next page of results in subsequent
     * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * The max number of results per page that should be returned. If the number
     * of available results is larger than `page_size`, a `next_page_token` is
     * returned which can be used to get the next page of results in subsequent
     * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The max number of results per page that should be returned. If the number
     * of available results is larger than `page_size`, a `next_page_token` is
     * returned which can be used to get the next page of results in subsequent
     * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.container.v1.ListUsableSubnetworksRequest)
  }

  // @@protoc_insertion_point(class_scope:google.container.v1.ListUsableSubnetworksRequest)
  private static final com.google.container.v1.ListUsableSubnetworksRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.container.v1.ListUsableSubnetworksRequest();
  }

  public static com.google.container.v1.ListUsableSubnetworksRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and attaches the partial message on failure.
  private static final com.google.protobuf.Parser<ListUsableSubnetworksRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListUsableSubnetworksRequest>() {
        @java.lang.Override
        public ListUsableSubnetworksRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListUsableSubnetworksRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListUsableSubnetworksRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.container.v1.ListUsableSubnetworksRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,106 | java-gkehub/proto-google-cloud-gkehub-v1beta/src/main/java/com/google/cloud/gkehub/configmanagement/v1beta/HierarchyControllerState.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1beta/configmanagement/configmanagement.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.configmanagement.v1beta;
/**
*
*
* <pre>
* State for Hierarchy Controller
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState}
*/
// NOTE(review): protoc-generated class (see "DO NOT EDIT" file header). The comments
// below are review annotations only; behavior changes must go through the .proto file
// (google/cloud/gkehub/v1beta/configmanagement/configmanagement.proto) and regeneration.
public final class HierarchyControllerState extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState)
    HierarchyControllerStateOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use HierarchyControllerState.newBuilder() to construct.
  private HierarchyControllerState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private HierarchyControllerState() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new HierarchyControllerState();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.gkehub.configmanagement.v1beta.ConfigManagementProto
        .internal_static_google_cloud_gkehub_configmanagement_v1beta_HierarchyControllerState_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.gkehub.configmanagement.v1beta.ConfigManagementProto
        .internal_static_google_cloud_gkehub_configmanagement_v1beta_HierarchyControllerState_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState.class,
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState.Builder.class);
  }
  // Presence bits for the two singular message fields: 0x00000001 = version_,
  // 0x00000002 = state_ (see hasVersion()/hasState()).
  private int bitField0_;
  public static final int VERSION_FIELD_NUMBER = 1;
  private com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version_;
  /**
   *
   *
   * <pre>
   * The version for Hierarchy Controller
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
   * </code>
   *
   * @return Whether the version field is set.
   */
  @java.lang.Override
  public boolean hasVersion() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * The version for Hierarchy Controller
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
   * </code>
   *
   * @return The version.
   */
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion getVersion() {
    // Unset message fields resolve to the shared default instance, never null.
    return version_ == null
        ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion
            .getDefaultInstance()
        : version_;
  }
  /**
   *
   *
   * <pre>
   * The version for Hierarchy Controller
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersionOrBuilder
      getVersionOrBuilder() {
    return version_ == null
        ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion
            .getDefaultInstance()
        : version_;
  }
  public static final int STATE_FIELD_NUMBER = 2;
  private com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state_;
  /**
   *
   *
   * <pre>
   * The deployment state for Hierarchy Controller
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
   * </code>
   *
   * @return Whether the state field is set.
   */
  @java.lang.Override
  public boolean hasState() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * The deployment state for Hierarchy Controller
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
   * </code>
   *
   * @return The state.
   */
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
      getState() {
    return state_ == null
        ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
            .getDefaultInstance()
        : state_;
  }
  /**
   *
   *
   * <pre>
   * The deployment state for Hierarchy Controller
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentStateOrBuilder
      getStateOrBuilder() {
    return state_ == null
        ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
            .getDefaultInstance()
        : state_;
  }
  // Cached isInitialized() result: -1 = not computed, 1 = initialized, 0 = not.
  // This message declares no required fields, so the result is always true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only fields whose presence bit is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getVersion());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getState());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed wire size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getVersion());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getState());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality: presence must match, then values, then unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState other =
        (com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState) obj;
    if (hasVersion() != other.hasVersion()) return false;
    if (hasVersion()) {
      if (!getVersion().equals(other.getVersion())) return false;
    }
    if (hasState() != other.hasState()) return false;
    if (hasState()) {
      if (!getState().equals(other.getState())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; safe because the message is immutable after construction.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasVersion()) {
      hash = (37 * hash) + VERSION_FIELD_NUMBER;
      hash = (53 * hash) + getVersion().hashCode();
    }
    if (hasState()) {
      hash = (37 * hash) + STATE_FIELD_NUMBER;
      hash = (53 * hash) + getState().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to the shared PARSER below.
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; anything else is copied.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * State for Hierarchy Controller
   * </pre>
   *
   * Protobuf type {@code google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState)
      com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerStateOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gkehub.configmanagement.v1beta.ConfigManagementProto
          .internal_static_google_cloud_gkehub_configmanagement_v1beta_HierarchyControllerState_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gkehub.configmanagement.v1beta.ConfigManagementProto
          .internal_static_google_cloud_gkehub_configmanagement_v1beta_HierarchyControllerState_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState.class,
              com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly creates the nested-field builders when the runtime requires it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getVersionFieldBuilder();
        getStateFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Resets presence bits and releases both nested-field builders.
      super.clear();
      bitField0_ = 0;
      version_ = null;
      if (versionBuilder_ != null) {
        versionBuilder_.dispose();
        versionBuilder_ = null;
      }
      state_ = null;
      if (stateBuilder_ != null) {
        stateBuilder_.dispose();
        stateBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.gkehub.configmanagement.v1beta.ConfigManagementProto
          .internal_static_google_cloud_gkehub_configmanagement_v1beta_HierarchyControllerState_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
        getDefaultInstanceForType() {
      return com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState build() {
      com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState buildPartial() {
      com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState result =
          new com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields from this builder into the result and mirrors the presence bits.
    private void buildPartial0(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.version_ = versionBuilder_ == null ? version_ : versionBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.state_ = stateBuilder_ == null ? state_ : stateBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState) {
        return mergeFrom(
            (com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState other) {
      // Merging the default instance is a no-op; only fields present in `other` are merged.
      if (other
          == com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
              .getDefaultInstance()) return this;
      if (other.hasVersion()) {
        mergeVersion(other.getVersion());
      }
      if (other.hasState()) {
        mergeState(other.getState());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: 10 = field 1 (version), 18 = field 2 (state); 0 = end of stream.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getVersionFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getStateFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version_;
    // Lazily created; once non-null it owns the version value instead of version_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion,
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion.Builder,
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersionOrBuilder>
        versionBuilder_;
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     *
     * @return Whether the version field is set.
     */
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     *
     * @return The version.
     */
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion getVersion() {
      if (versionBuilder_ == null) {
        return version_ == null
            ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion
                .getDefaultInstance()
            : version_;
      } else {
        return versionBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     */
    public Builder setVersion(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion value) {
      if (versionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        version_ = value;
      } else {
        versionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     */
    public Builder setVersion(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion.Builder
            builderForValue) {
      if (versionBuilder_ == null) {
        version_ = builderForValue.build();
      } else {
        versionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     */
    public Builder mergeVersion(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion value) {
      // Field-merges into an existing non-default value; otherwise replaces outright.
      if (versionBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && version_ != null
            && version_
                != com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion
                    .getDefaultInstance()) {
          getVersionBuilder().mergeFrom(value);
        } else {
          version_ = value;
        }
      } else {
        versionBuilder_.mergeFrom(value);
      }
      if (version_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     */
    public Builder clearVersion() {
      bitField0_ = (bitField0_ & ~0x00000001);
      version_ = null;
      if (versionBuilder_ != null) {
        versionBuilder_.dispose();
        versionBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion.Builder
        getVersionBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getVersionFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersionOrBuilder
        getVersionOrBuilder() {
      if (versionBuilder_ != null) {
        return versionBuilder_.getMessageOrBuilder();
      } else {
        return version_ == null
            ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion
                .getDefaultInstance()
            : version_;
      }
    }
    /**
     *
     *
     * <pre>
     * The version for Hierarchy Controller
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion version = 1;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion,
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion.Builder,
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersionOrBuilder>
        getVersionFieldBuilder() {
      if (versionBuilder_ == null) {
        versionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion,
                com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerVersion.Builder,
                com.google.cloud.gkehub.configmanagement.v1beta
                    .HierarchyControllerVersionOrBuilder>(
                getVersion(), getParentForChildren(), isClean());
        version_ = null;
      }
      return versionBuilder_;
    }
    private com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
        state_;
    // Lazily created; once non-null it owns the state value instead of state_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState,
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
                .Builder,
            com.google.cloud.gkehub.configmanagement.v1beta
                .HierarchyControllerDeploymentStateOrBuilder>
        stateBuilder_;
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     *
     * @return Whether the state field is set.
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     *
     * @return The state.
     */
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
        getState() {
      if (stateBuilder_ == null) {
        return state_ == null
            ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
                .getDefaultInstance()
            : state_;
      } else {
        return stateBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     */
    public Builder setState(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState value) {
      if (stateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        state_ = value;
      } else {
        stateBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     */
    public Builder setState(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState.Builder
            builderForValue) {
      if (stateBuilder_ == null) {
        state_ = builderForValue.build();
      } else {
        stateBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     */
    public Builder mergeState(
        com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState value) {
      // Field-merges into an existing non-default value; otherwise replaces outright.
      if (stateBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && state_ != null
            && state_
                != com.google.cloud.gkehub.configmanagement.v1beta
                    .HierarchyControllerDeploymentState.getDefaultInstance()) {
          getStateBuilder().mergeFrom(value);
        } else {
          state_ = value;
        }
      } else {
        stateBuilder_.mergeFrom(value);
      }
      if (state_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     */
    public Builder clearState() {
      bitField0_ = (bitField0_ & ~0x00000002);
      state_ = null;
      if (stateBuilder_ != null) {
        stateBuilder_.dispose();
        stateBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
            .Builder
        getStateBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getStateFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1beta
            .HierarchyControllerDeploymentStateOrBuilder
        getStateOrBuilder() {
      if (stateBuilder_ != null) {
        return stateBuilder_.getMessageOrBuilder();
      } else {
        return state_ == null
            ? com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
                .getDefaultInstance()
            : state_;
      }
    }
    /**
     *
     *
     * <pre>
     * The deployment state for Hierarchy Controller
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState state = 2;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState,
            com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
                .Builder,
            com.google.cloud.gkehub.configmanagement.v1beta
                .HierarchyControllerDeploymentStateOrBuilder>
        getStateFieldBuilder() {
      if (stateBuilder_ == null) {
        stateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState,
                com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerDeploymentState
                    .Builder,
                com.google.cloud.gkehub.configmanagement.v1beta
                    .HierarchyControllerDeploymentStateOrBuilder>(
                getState(), getParentForChildren(), isClean());
        state_ = null;
      }
      return stateBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState)
  // Shared immutable default instance; unset message fields resolve to this singleton.
  private static final com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState();
  }
  public static com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared by all parseFrom(...) overloads; partial results are
  // attached to thrown InvalidProtocolBufferExceptions for caller inspection.
  private static final com.google.protobuf.Parser<HierarchyControllerState> PARSER =
      new com.google.protobuf.AbstractParser<HierarchyControllerState>() {
        @java.lang.Override
        public HierarchyControllerState parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<HierarchyControllerState> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<HierarchyControllerState> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1beta.HierarchyControllerState
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/maven-resolver | 36,140 | maven-resolver-impl/src/test/java/org/eclipse/aether/internal/impl/DefaultArtifactResolverTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.eclipse.aether.internal.impl;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.RepositoryEvent;
import org.eclipse.aether.RepositoryEvent.EventType;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.ArtifactProperties;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.impl.UpdateCheckManager;
import org.eclipse.aether.impl.VersionResolver;
import org.eclipse.aether.internal.impl.filter.DefaultRemoteRepositoryFilterManager;
import org.eclipse.aether.internal.impl.filter.Filters;
import org.eclipse.aether.internal.test.util.TestFileUtils;
import org.eclipse.aether.internal.test.util.TestLocalRepositoryManager;
import org.eclipse.aether.internal.test.util.TestUtils;
import org.eclipse.aether.metadata.Metadata;
import org.eclipse.aether.repository.LocalArtifactRegistration;
import org.eclipse.aether.repository.LocalArtifactRequest;
import org.eclipse.aether.repository.LocalArtifactResult;
import org.eclipse.aether.repository.LocalMetadataRegistration;
import org.eclipse.aether.repository.LocalMetadataRequest;
import org.eclipse.aether.repository.LocalMetadataResult;
import org.eclipse.aether.repository.LocalRepository;
import org.eclipse.aether.repository.LocalRepositoryManager;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.repository.RepositoryPolicy;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.repository.WorkspaceRepository;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;
import org.eclipse.aether.resolution.VersionRequest;
import org.eclipse.aether.resolution.VersionResolutionException;
import org.eclipse.aether.resolution.VersionResult;
import org.eclipse.aether.spi.connector.ArtifactDownload;
import org.eclipse.aether.spi.connector.MetadataDownload;
import org.eclipse.aether.spi.connector.filter.RemoteRepositoryFilterSource;
import org.eclipse.aether.spi.io.PathProcessorSupport;
import org.eclipse.aether.transfer.ArtifactNotFoundException;
import org.eclipse.aether.transfer.ArtifactTransferException;
import org.eclipse.aether.util.repository.SimpleResolutionErrorPolicy;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
*/
/**
 * UT for {@link DefaultArtifactResolver}.
 *
 * <p>Covers resolution from the local repository, from remote repositories, from the workspace,
 * remote-repository filtering, the not-found cache, version-resolution failures and the
 * repository events emitted along the way. All remote interactions go through a
 * {@link RecordingRepositoryConnector} so expectations can be verified without real transport.
 */
public class DefaultArtifactResolverTest {
    private DefaultArtifactResolver resolver;

    private DefaultRepositorySystemSession session;

    private TestLocalRepositoryManager lrm;

    private StubRepositoryConnectorProvider repositoryConnectorProvider;

    private Artifact artifact;

    private RecordingRepositoryConnector connector;

    private HashMap<String, RemoteRepositoryFilterSource> remoteRepositoryFilterSources;

    private DefaultRemoteRepositoryFilterManager remoteRepositoryFilterManager;

    @BeforeEach
    void setup() {
        remoteRepositoryFilterSources = new HashMap<>();
        remoteRepositoryFilterManager = new DefaultRemoteRepositoryFilterManager(remoteRepositoryFilterSources);

        // StaticUpdateCheckManager(true) forces "update required" so remote fetches always happen
        UpdateCheckManager updateCheckManager = new StaticUpdateCheckManager(true);
        repositoryConnectorProvider = new StubRepositoryConnectorProvider();
        VersionResolver versionResolver = new StubVersionResolver();
        session = TestUtils.newSession();
        lrm = (TestLocalRepositoryManager) session.getLocalRepositoryManager();
        resolver = setupArtifactResolver(versionResolver, updateCheckManager);

        artifact = new DefaultArtifact("gid", "aid", "", "ext", "ver");

        connector = new RecordingRepositoryConnector();
        repositoryConnectorProvider.setConnector(connector);
    }

    /**
     * Wires a {@link DefaultArtifactResolver} with the given version resolver and update check
     * manager, reusing the test's connector provider and filter manager.
     */
    private DefaultArtifactResolver setupArtifactResolver(
            VersionResolver versionResolver, UpdateCheckManager updateCheckManager) {
        return new DefaultArtifactResolver(
                new PathProcessorSupport(),
                new StubRepositoryEventDispatcher(),
                versionResolver,
                updateCheckManager,
                repositoryConnectorProvider,
                new StubRemoteRepositoryManager(),
                new StubSyncContextFactory(),
                new DefaultOfflineController(),
                Collections.emptyMap(),
                remoteRepositoryFilterManager);
    }

    @AfterEach
    void teardown() throws Exception {
        if (session.getLocalRepository() != null) {
            TestFileUtils.deleteFile(session.getLocalRepository().getBasedir());
        }
    }

    /**
     * Creates a connector whose first artifact download always fails with
     * {@link ArtifactNotFoundException}, while still recording the request for later verification.
     */
    private RecordingRepositoryConnector newNotFoundConnector() {
        return new RecordingRepositoryConnector() {
            @Override
            public void get(
                    Collection<? extends ArtifactDownload> artifactDownloads,
                    Collection<? extends MetadataDownload> metadataDownloads) {
                super.get(artifactDownloads, metadataDownloads);
                ArtifactDownload download = artifactDownloads.iterator().next();
                download.setException(new ArtifactNotFoundException(download.getArtifact(), null, "not found"));
            }
        };
    }

    /**
     * Asserts the successful resolution contract: no exceptions, a file is attached, and the
     * resolved artifact (minus its file) equals the requested {@link #artifact}.
     */
    private void assertResolved(ArtifactResult result) {
        assertTrue(result.getExceptions().isEmpty());
        Artifact resolved = result.getArtifact();
        assertNotNull(resolved.getFile());
        assertEquals(artifact, resolved.setFile(null));
    }

    /**
     * Asserts that a resolution failure carries exactly one result for {@code request}, with no
     * artifact and a first exception of {@code expectedType}; returns that result for further
     * inspection.
     */
    private static ArtifactResult assertSingleFailure(
            ArtifactResolutionException e, ArtifactRequest request, Class<? extends Exception> expectedType) {
        assertNotNull(e.getResults());
        assertEquals(1, e.getResults().size());
        ArtifactResult result = e.getResults().get(0);
        assertSame(request, result.getRequest());
        assertFalse(result.getExceptions().isEmpty());
        assertInstanceOf(expectedType, result.getExceptions().get(0));
        assertNull(result.getArtifact());
        return result;
    }

    /** Version resolver that always fails, for exercising the resolver's error path. */
    private static final class FailingVersionResolver implements VersionResolver {
        @Override
        public VersionResult resolveVersion(RepositorySystemSession session, VersionRequest request)
                throws VersionResolutionException {
            throw new VersionResolutionException(new VersionResult(request));
        }
    }

    /**
     * Builds a minimal {@link LocalRepositoryManager} stub that answers artifact lookups with a
     * fresh temp file and the given availability flag.
     *
     * @param repository value returned by {@code getRepository()} (may be {@code null})
     * @param available whether found artifacts are flagged as locally available
     * @param metadataBacked whether metadata lookups get a temp file attached
     */
    private static LocalRepositoryManager stubLocalRepositoryManager(
            final LocalRepository repository, final boolean available, final boolean metadataBacked) {
        return new LocalRepositoryManager() {
            public LocalRepository getRepository() {
                return repository;
            }

            public String getPathForRemoteMetadata(Metadata metadata, RemoteRepository repo, String context) {
                return null;
            }

            public String getPathForRemoteArtifact(Artifact artifact, RemoteRepository repo, String context) {
                return null;
            }

            public String getPathForLocalMetadata(Metadata metadata) {
                return null;
            }

            public String getPathForLocalArtifact(Artifact artifact) {
                return null;
            }

            public LocalArtifactResult find(RepositorySystemSession session, LocalArtifactRequest request) {
                LocalArtifactResult result = new LocalArtifactResult(request);
                result.setAvailable(available);
                try {
                    result.setFile(TestFileUtils.createTempFile(""));
                } catch (IOException e) {
                    e.printStackTrace();
                }
                return result;
            }

            public void add(RepositorySystemSession session, LocalArtifactRegistration request) {}

            public LocalMetadataResult find(RepositorySystemSession session, LocalMetadataRequest request) {
                LocalMetadataResult result = new LocalMetadataResult(request);
                if (metadataBacked) {
                    try {
                        result.setFile(TestFileUtils.createTempFile(""));
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                return result;
            }

            public void add(RepositorySystemSession session, LocalMetadataRegistration request) {}
        };
    }

    @Test
    void testResolveLocalArtifactSuccessful() throws IOException, ArtifactResolutionException {
        // an artifact with a LOCAL_PATH property resolves straight from that path
        File tmpFile = TestFileUtils.createTempFile("tmp");
        Map<String, String> properties = new HashMap<>();
        properties.put(ArtifactProperties.LOCAL_PATH, tmpFile.getAbsolutePath());
        artifact = artifact.setProperties(properties);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
    }

    @Test
    void testResolveLocalArtifactUnsuccessful() throws IOException {
        File tmpFile = TestFileUtils.createTempFile("tmp");
        Map<String, String> properties = new HashMap<>();
        properties.put(ArtifactProperties.LOCAL_PATH, tmpFile.getAbsolutePath());
        artifact = artifact.setProperties(properties);

        // remove the backing file so the LOCAL_PATH lookup must fail
        tmpFile.delete();

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        ArtifactResolutionException e = assertThrows(
                ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));
        assertSingleFailure(e, request, ArtifactNotFoundException.class);
    }

    @Test
    void testResolveRemoteArtifact() throws ArtifactResolutionException {
        connector.setExpectGet(artifact);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
        connector.assertSeenExpected();
    }

    @Test
    void testResolveRemoteArtifactUnsuccessful() {
        RecordingRepositoryConnector connector = newNotFoundConnector();
        connector.setExpectGet(artifact);
        repositoryConnectorProvider.setConnector(connector);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResolutionException e = assertThrows(
                ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));
        connector.assertSeenExpected();
        assertSingleFailure(e, request, ArtifactNotFoundException.class);
    }

    @Test
    void testResolveRemoteArtifactAlwaysAcceptFilter() throws ArtifactResolutionException {
        // one never-accept filter targets a non-matching repo id; the always-accept filter wins
        remoteRepositoryFilterSources.put("filter1", Filters.neverAcceptFrom("invalid repo id"));
        remoteRepositoryFilterSources.put("filter2", Filters.alwaysAccept());
        connector.setExpectGet(artifact);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
        connector.assertSeenExpected();
    }

    @Test
    void testResolveRemoteArtifactNeverAcceptFilter() {
        remoteRepositoryFilterSources.put("filter1", Filters.neverAcceptFrom("invalid repo id"));
        remoteRepositoryFilterSources.put("filter2", Filters.neverAccept());
        // connector.setExpectGet( artifact ); // should not see it

        ArtifactRequest request = new ArtifactRequest(artifact, null, "project");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResolutionException e = assertThrows(
                ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));
        connector.assertSeenExpected();
        ArtifactResult result = assertSingleFailure(e, request, ArtifactNotFoundException.class);
        assertEquals("never-accept", result.getExceptions().get(0).getMessage());
    }

    @Test
    void testResolveRemoteArtifactAlwaysAcceptFromRepoFilter() throws ArtifactResolutionException {
        remoteRepositoryFilterSources.put("filter1", Filters.alwaysAcceptFrom("id"));
        connector.setExpectGet(artifact);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
        connector.assertSeenExpected();
    }

    @Test
    void testResolveRemoteArtifactNeverAcceptFilterFromRepo() {
        remoteRepositoryFilterSources.put("filter1", Filters.neverAcceptFrom("id"));
        // connector.setExpectGet( artifact ); // should not see it

        ArtifactRequest request = new ArtifactRequest(artifact, null, "project");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResolutionException e = assertThrows(
                ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));
        connector.assertSeenExpected();
        ArtifactResult result = assertSingleFailure(e, request, ArtifactNotFoundException.class);
        assertEquals("never-accept-id", result.getExceptions().get(0).getMessage());
    }

    @Test
    void testArtifactNotFoundCache() throws Exception {
        // connector that fails ALL downloads and removes any partially created files
        RecordingRepositoryConnector connector = new RecordingRepositoryConnector() {
            @Override
            public void get(
                    Collection<? extends ArtifactDownload> artifactDownloads,
                    Collection<? extends MetadataDownload> metadataDownloads) {
                super.get(artifactDownloads, metadataDownloads);
                for (ArtifactDownload download : artifactDownloads) {
                    download.getFile().delete();
                    download.setException(new ArtifactNotFoundException(download.getArtifact(), null, "not found"));
                }
            }
        };
        repositoryConnectorProvider.setConnector(connector);

        // real update check manager so not-found results get cached between resolutions
        resolver = setupArtifactResolver(
                new StubVersionResolver(),
                new DefaultUpdateCheckManager(
                        new DefaultTrackingFileManager(),
                        new DefaultUpdatePolicyAnalyzer(),
                        new DefaultPathProcessor()));

        session.setResolutionErrorPolicy(new SimpleResolutionErrorPolicy(true, false));
        session.setUpdatePolicy(RepositoryPolicy.UPDATE_POLICY_NEVER);

        RemoteRepository remoteRepo = new RemoteRepository.Builder("id", "default", "file:///").build();

        Artifact artifact1 = artifact;
        Artifact artifact2 = artifact.setVersion("ver2");

        ArtifactRequest request1 = new ArtifactRequest(artifact1, Arrays.asList(remoteRepo), "");
        ArtifactRequest request2 = new ArtifactRequest(artifact2, Arrays.asList(remoteRepo), "");

        // first round: both artifacts are fetched and fail
        connector.setExpectGet(artifact1, artifact2);
        assertThrows(
                ArtifactResolutionException.class,
                () -> resolver.resolveArtifacts(session, Arrays.asList(request1, request2)));
        connector.assertSeenExpected();

        // even though the file exists in the local repo, the artifact must still be treated
        // as unresolved because the not-found result was cached
        TestFileUtils.writeString(
                new File(lrm.getRepository().getBasedir(), lrm.getPathForLocalArtifact(artifact2)), "artifact");
        lrm.setArtifactAvailability(artifact2, false);

        DefaultUpdateCheckManagerTest.resetSessionData(session);
        connector.resetActual();
        connector.setExpectGet(new Artifact[0]); // second round must not hit the connector at all

        ArtifactResolutionException e = assertThrows(
                ArtifactResolutionException.class,
                () -> resolver.resolveArtifacts(session, Arrays.asList(request1, request2)));
        connector.assertSeenExpected();
        for (ArtifactResult result : e.getResults()) {
            Exception ex = result.getExceptions().get(0);
            assertInstanceOf(ArtifactNotFoundException.class, ex, ex.toString());
            assertTrue(ex.getMessage().contains("cached"), ex.toString());
        }
    }

    @Test
    void testResolveFromWorkspace() throws IOException, ArtifactResolutionException {
        // workspace reader that materializes every artifact as a temp file with its own toString
        WorkspaceReader workspace = new WorkspaceReader() {
            public WorkspaceRepository getRepository() {
                return new WorkspaceRepository("default");
            }

            public List<String> findVersions(Artifact artifact) {
                return Arrays.asList(artifact.getVersion());
            }

            public File findArtifact(Artifact artifact) {
                try {
                    return TestFileUtils.createTempFile(artifact.toString());
                } catch (IOException e) {
                    throw new RuntimeException(e.getMessage(), e);
                }
            }
        };
        session.setWorkspaceReader(workspace);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertTrue(result.getExceptions().isEmpty());
        Artifact resolved = result.getArtifact();
        assertNotNull(resolved.getFile());
        // the file content proves the artifact came from the workspace, not the repository
        assertEquals(resolved.toString(), TestFileUtils.readString(resolved.getFile()));
        assertEquals(artifact, resolved.setFile(null));
        connector.assertSeenExpected();
    }

    @Test
    void testResolveFromWorkspaceFallbackToRepository() throws ArtifactResolutionException {
        // workspace reader that knows the version but cannot supply a file -> repo fallback
        WorkspaceReader workspace = new WorkspaceReader() {
            public WorkspaceRepository getRepository() {
                return new WorkspaceRepository("default");
            }

            public List<String> findVersions(Artifact artifact) {
                return Arrays.asList(artifact.getVersion());
            }

            public File findArtifact(Artifact artifact) {
                return null;
            }
        };
        session.setWorkspaceReader(workspace);

        connector.setExpectGet(artifact);
        repositoryConnectorProvider.setConnector(connector);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
        connector.assertSeenExpected();
    }

    @Test
    void testRepositoryEventsSuccessfulLocal() throws ArtifactResolutionException, IOException {
        RecordingRepositoryListener listener = new RecordingRepositoryListener();
        session.setRepositoryListener(listener);

        File tmpFile = TestFileUtils.createTempFile("tmp");
        Map<String, String> properties = new HashMap<>();
        properties.put(ArtifactProperties.LOCAL_PATH, tmpFile.getAbsolutePath());
        artifact = artifact.setProperties(properties);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        resolver.resolveArtifact(session, request);

        // local resolution emits only RESOLVING + RESOLVED, no download events
        List<RepositoryEvent> events = listener.getEvents();
        assertEquals(2, events.size());

        RepositoryEvent event = events.get(0);
        assertEquals(EventType.ARTIFACT_RESOLVING, event.getType());
        assertNull(event.getException());
        assertEquals(artifact, event.getArtifact());

        event = events.get(1);
        assertEquals(EventType.ARTIFACT_RESOLVED, event.getType());
        assertNull(event.getException());
        assertEquals(artifact, event.getArtifact().setFile(null));
    }

    @Test
    void testRepositoryEventsUnsuccessfulLocal() {
        RecordingRepositoryListener listener = new RecordingRepositoryListener();
        session.setRepositoryListener(listener);

        Map<String, String> properties = new HashMap<>();
        properties.put(ArtifactProperties.LOCAL_PATH, "doesnotexist");
        artifact = artifact.setProperties(properties);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        assertThrows(ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));

        List<RepositoryEvent> events = listener.getEvents();
        assertEquals(2, events.size());

        RepositoryEvent event = events.get(0);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_RESOLVING, event.getType());

        event = events.get(1);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_RESOLVED, event.getType());
        assertNotNull(event.getException());
        assertEquals(1, event.getExceptions().size());
    }

    @Test
    void testRepositoryEventsSuccessfulRemote() throws ArtifactResolutionException {
        RecordingRepositoryListener listener = new RecordingRepositoryListener();
        session.setRepositoryListener(listener);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());
        resolver.resolveArtifact(session, request);

        // remote resolution: RESOLVING, DOWNLOADING, DOWNLOADED, RESOLVED in order
        List<RepositoryEvent> events = listener.getEvents();
        assertEquals(4, events.size(), events.toString());

        RepositoryEvent event = events.get(0);
        assertEquals(EventType.ARTIFACT_RESOLVING, event.getType());
        assertNull(event.getException());
        assertEquals(artifact, event.getArtifact());

        event = events.get(1);
        assertEquals(EventType.ARTIFACT_DOWNLOADING, event.getType());
        assertNull(event.getException());
        assertEquals(artifact, event.getArtifact().setFile(null));

        event = events.get(2);
        assertEquals(EventType.ARTIFACT_DOWNLOADED, event.getType());
        assertNull(event.getException());
        assertEquals(artifact, event.getArtifact().setFile(null));

        event = events.get(3);
        assertEquals(EventType.ARTIFACT_RESOLVED, event.getType());
        assertNull(event.getException());
        assertEquals(artifact, event.getArtifact().setFile(null));
    }

    @Test
    void testRepositoryEventsUnsuccessfulRemote() {
        RecordingRepositoryConnector connector = newNotFoundConnector();
        repositoryConnectorProvider.setConnector(connector);

        RecordingRepositoryListener listener = new RecordingRepositoryListener();
        session.setRepositoryListener(listener);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());
        assertThrows(ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));

        // all four events are still emitted; DOWNLOADED and RESOLVED carry the failure
        List<RepositoryEvent> events = listener.getEvents();
        assertEquals(4, events.size(), events.toString());

        RepositoryEvent event = events.get(0);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_RESOLVING, event.getType());

        event = events.get(1);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_DOWNLOADING, event.getType());

        event = events.get(2);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_DOWNLOADED, event.getType());
        assertNotNull(event.getException());
        assertEquals(1, event.getExceptions().size());

        event = events.get(3);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_RESOLVED, event.getType());
        assertNotNull(event.getException());
        assertEquals(1, event.getExceptions().size());
    }

    @Test
    void testVersionResolverFails() {
        resolver = setupArtifactResolver(new FailingVersionResolver(), new StaticUpdateCheckManager(true));

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        ArtifactResolutionException e = assertThrows(
                ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));
        connector.assertSeenExpected();
        assertSingleFailure(e, request, VersionResolutionException.class);
    }

    @Test
    void testRepositoryEventsOnVersionResolverFail() {
        resolver = setupArtifactResolver(new FailingVersionResolver(), new StaticUpdateCheckManager(true));

        RecordingRepositoryListener listener = new RecordingRepositoryListener();
        session.setRepositoryListener(listener);

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        assertThrows(ArtifactResolutionException.class, () -> resolver.resolveArtifact(session, request));

        List<RepositoryEvent> events = listener.getEvents();
        assertEquals(2, events.size());

        RepositoryEvent event = events.get(0);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_RESOLVING, event.getType());

        event = events.get(1);
        assertEquals(artifact, event.getArtifact());
        assertEquals(EventType.ARTIFACT_RESOLVED, event.getType());
        assertNotNull(event.getException());
        assertEquals(1, event.getExceptions().size());
    }

    @Test
    void testLocalArtifactAvailable() throws ArtifactResolutionException {
        // artifact is reported locally available -> no remote download is attempted
        session.setLocalRepositoryManager(stubLocalRepositoryManager(null, true, true));

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
    }

    @Test
    void testFindInLocalRepositoryWhenVersionWasFoundInLocalRepository() throws ArtifactResolutionException {
        // artifact not flagged available, but the version was resolved from the local repository,
        // so the locally found file must still be used
        session.setLocalRepositoryManager(
                stubLocalRepositoryManager(new LocalRepository(new File("")), false, false));

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");
        request.addRepository(new RemoteRepository.Builder("id", "default", "file:///").build());

        resolver = setupArtifactResolver(
                new VersionResolver() {
                    @Override
                    public VersionResult resolveVersion(RepositorySystemSession session, VersionRequest request) {
                        return new VersionResult(request)
                                .setRepository(new LocalRepository("id"))
                                .setVersion(request.getArtifact().getVersion());
                    }
                },
                new StaticUpdateCheckManager(true));

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
    }

    @Test
    void testFindInLocalRepositoryWhenVersionRangeWasResolvedFromLocalRepository() throws ArtifactResolutionException {
        // version result without a repository (range resolved locally) must also accept the
        // locally found file even though it is not flagged available
        session.setLocalRepositoryManager(
                stubLocalRepositoryManager(new LocalRepository(new File("")), false, false));

        ArtifactRequest request = new ArtifactRequest(artifact, null, "");

        resolver = setupArtifactResolver(
                new VersionResolver() {
                    @Override
                    public VersionResult resolveVersion(RepositorySystemSession session, VersionRequest request) {
                        return new VersionResult(request)
                                .setVersion(request.getArtifact().getVersion());
                    }
                },
                new StaticUpdateCheckManager(true));

        ArtifactResult result = resolver.resolveArtifact(session, request);

        assertResolved(result);
    }
}
|
apache/phoenix | 36,041 | phoenix-core-server/src/main/java/org/apache/phoenix/mapreduce/index/IndexUpgradeTool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.mapreduce.index;
import static org.apache.phoenix.query.QueryServicesOptions.GLOBAL_INDEX_CHECKER_ENABLED_MAP_EXPIRATION_MIN;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.phoenix.hbase.index.IndexRegionObserver;
import org.apache.phoenix.hbase.index.Indexer;
import org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder;
import org.apache.phoenix.index.GlobalIndexChecker;
import org.apache.phoenix.index.PhoenixIndexBuilder;
import org.apache.phoenix.index.PhoenixIndexCodec;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.mapreduce.util.ConnectionUtil;
import org.apache.phoenix.query.ConnectionQueryServices;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.util.EnvironmentEdgeManager;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.phoenix.thirdparty.com.google.common.base.Strings;
import org.apache.phoenix.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.phoenix.thirdparty.org.apache.commons.cli.CommandLineParser;
import org.apache.phoenix.thirdparty.org.apache.commons.cli.DefaultParser;
import org.apache.phoenix.thirdparty.org.apache.commons.cli.HelpFormatter;
import org.apache.phoenix.thirdparty.org.apache.commons.cli.Option;
import org.apache.phoenix.thirdparty.org.apache.commons.cli.Options;
import org.apache.phoenix.thirdparty.org.apache.commons.cli.ParseException;
public class IndexUpgradeTool extends Configured implements Tool {
  private static final Logger LOGGER = Logger.getLogger(IndexUpgradeTool.class.getName());

  // Short option names, shared between the option definitions and their help texts.
  private static final String INDEX_REBUILD_OPTION_SHORT_OPT = "rb";
  private static final String INDEX_TOOL_OPTION_SHORT_OPT = "tool";

  // Command line options understood by this tool.
  private static final Option OPERATION_OPTION =
    new Option("o", "operation", true, "[Required] Operation to perform (upgrade/rollback)");
  private static final Option TABLE_OPTION =
    new Option("tb", "table", true, "[Required] Tables list ex. table1,table2");
  private static final Option TABLE_CSV_FILE_OPTION =
    new Option("f", "file", true, "[Optional] Tables list in a csv file");
  private static final Option DRY_RUN_OPTION = new Option("d", "dry-run", false,
    "[Optional] If passed this will output steps that will be executed");
  private static final Option HELP_OPTION = new Option("h", "help", false, "Help");
  private static final Option LOG_FILE_OPTION =
    new Option("lf", "logfile", true, "[Optional] Log file path where the logs are written");
  private static final Option INDEX_REBUILD_OPTION = new Option(INDEX_REBUILD_OPTION_SHORT_OPT,
    "index-rebuild", false, "[Optional] Rebuild the indexes. Set -" + INDEX_TOOL_OPTION_SHORT_OPT
      + " to pass options to IndexTool.");
  private static final Option INDEX_TOOL_OPTION = new Option(INDEX_TOOL_OPTION_SHORT_OPT,
    "index-tool", true, "[Optional] Options to pass to indexTool when rebuilding indexes. "
      + "Set -" + INDEX_REBUILD_OPTION_SHORT_OPT + " to rebuild the index.");

  // Values accepted by the -o/--operation option.
  public static final String UPGRADE_OP = "upgrade";
  public static final String ROLLBACK_OP = "rollback";
  // Placeholder index id used for global (non-view) indexes in the rebuild map.
  private static final String GLOBAL_INDEX_ID = "#NA#";

  private IndexTool indexingTool;
  // table name -> set of its index table names
  private HashMap<String, HashSet<String>> tablesAndIndexes = new HashMap<>();
  // table name -> (index name -> IndexInfo) for indexes that need rebuilding
  private HashMap<String, HashMap<String, IndexInfo>> rebuildMap = new HashMap<>();
  private HashMap<String, String> prop = new HashMap<>();
  private HashMap<String, String> emptyProp = new HashMap<>();
  // Parsed command-line state.
  private boolean dryRun, upgrade, rebuild;
  private String operation;
  private String inputTables;
  private String logFile;
  private String inputFile;
  private boolean isWaitComplete = false;
  private String indexToolOpts;
  // Test hooks: force failures at specific points to exercise error handling.
  private boolean test = false;
  private boolean failUpgradeTask = false;
  private boolean failDowngradeTask = false;
  private boolean hasFailure = false;
public void setDryRun(boolean dryRun) {
this.dryRun = dryRun;
}
public void setInputTables(String inputTables) {
this.inputTables = inputTables;
}
public void setLogFile(String logFile) {
this.logFile = logFile;
}
public void setInputFile(String inputFile) {
this.inputFile = inputFile;
}
public void setTest(boolean test) {
this.test = test;
}
public boolean getIsWaitComplete() {
return this.isWaitComplete;
}
public boolean getDryRun() {
return this.dryRun;
}
public String getInputTables() {
return this.inputTables;
}
public String getLogFile() {
return this.logFile;
}
public String getOperation() {
return this.operation;
}
public boolean getIsRebuild() {
return this.rebuild;
}
public String getIndexToolOpts() {
return this.indexToolOpts;
}
@VisibleForTesting
public void setFailUpgradeTask(boolean failInitialTask) {
this.failUpgradeTask = failInitialTask;
}
public void setFailDowngradeTask(boolean failRollbackTask) {
this.failDowngradeTask = failRollbackTask;
}
public IndexUpgradeTool(String mode, String tables, String inputFile, String outputFile,
boolean dryRun, IndexTool indexTool, boolean rebuild) {
this.operation = mode;
this.inputTables = tables;
this.inputFile = inputFile;
this.logFile = outputFile;
this.dryRun = dryRun;
this.indexingTool = indexTool;
this.rebuild = rebuild;
}
public IndexUpgradeTool() {
}
@Override
public int run(String[] args) throws Exception {
CommandLine cmdLine = null;
try {
cmdLine = parseOptions(args);
LOGGER.info("Index Upgrade tool initiated: " + String.join(",", args));
} catch (IllegalStateException e) {
printHelpAndExit(e.getMessage(), getOptions());
}
try {
initializeTool(cmdLine);
prepareToolSetup();
executeTool();
} catch (Exception e) {
e.printStackTrace();
hasFailure = true;
}
if (hasFailure) {
return -1;
} else {
return 0;
}
}
/**
* Parses the commandline arguments, throws IllegalStateException if mandatory arguments are
* missing.
* @param args supplied command line arguments
* @return the parsed command line
*/
@VisibleForTesting
public CommandLine parseOptions(String[] args) {
final Options options = getOptions();
CommandLineParser parser = DefaultParser.builder().setAllowPartialMatching(false)
.setStripLeadingAndTrailingQuotes(false).build();
CommandLine cmdLine = null;
try {
cmdLine = parser.parse(options, args);
} catch (ParseException e) {
printHelpAndExit("severe parsing command line options: " + e.getMessage(), options);
}
if (cmdLine.hasOption(HELP_OPTION.getOpt())) {
printHelpAndExit(options, 0);
}
if (!cmdLine.hasOption(OPERATION_OPTION.getOpt())) {
throw new IllegalStateException(OPERATION_OPTION.getLongOpt() + " is a mandatory parameter");
}
if (
cmdLine.hasOption(DRY_RUN_OPTION.getOpt()) && !cmdLine.hasOption(LOG_FILE_OPTION.getOpt())
) {
throw new IllegalStateException("Log file with " + TABLE_OPTION.getLongOpt()
+ " is mandatory if " + DRY_RUN_OPTION.getLongOpt() + " is passed");
}
if (
!(cmdLine.hasOption(TABLE_OPTION.getOpt()))
&& !(cmdLine.hasOption(TABLE_CSV_FILE_OPTION.getOpt()))
) {
throw new IllegalStateException("Tables list should be passed in either with"
+ TABLE_OPTION.getLongOpt() + " or " + TABLE_CSV_FILE_OPTION.getLongOpt());
}
if (
(cmdLine.hasOption(TABLE_OPTION.getOpt()))
&& (cmdLine.hasOption(TABLE_CSV_FILE_OPTION.getOpt()))
) {
throw new IllegalStateException("Tables list passed in with" + TABLE_OPTION.getLongOpt()
+ " and " + TABLE_CSV_FILE_OPTION.getLongOpt() + "; specify only one.");
}
if (
(cmdLine.hasOption(INDEX_TOOL_OPTION.getOpt()))
&& !cmdLine.hasOption(INDEX_REBUILD_OPTION.getOpt())
) {
throw new IllegalStateException(
"Index tool options should be passed in with " + INDEX_REBUILD_OPTION.getLongOpt());
}
return cmdLine;
}
private void printHelpAndExit(String severeMessage, Options options) {
System.err.println(severeMessage);
printHelpAndExit(options, 1);
}
private void printHelpAndExit(Options options, int exitCode) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("help", options);
System.exit(exitCode);
}
private Options getOptions() {
final Options options = new Options();
options.addOption(OPERATION_OPTION);
TABLE_OPTION.setOptionalArg(true);
options.addOption(TABLE_OPTION);
TABLE_CSV_FILE_OPTION.setOptionalArg(true);
options.addOption(TABLE_CSV_FILE_OPTION);
DRY_RUN_OPTION.setOptionalArg(true);
options.addOption(DRY_RUN_OPTION);
LOG_FILE_OPTION.setOptionalArg(true);
options.addOption(LOG_FILE_OPTION);
options.addOption(HELP_OPTION);
INDEX_REBUILD_OPTION.setOptionalArg(true);
options.addOption(INDEX_REBUILD_OPTION);
INDEX_TOOL_OPTION.setOptionalArg(true);
options.addOption(INDEX_TOOL_OPTION);
return options;
}
@VisibleForTesting
public void initializeTool(CommandLine cmdLine) {
operation = cmdLine.getOptionValue(OPERATION_OPTION.getOpt());
inputTables = cmdLine.getOptionValue(TABLE_OPTION.getOpt());
logFile = cmdLine.getOptionValue(LOG_FILE_OPTION.getOpt());
inputFile = cmdLine.getOptionValue(TABLE_CSV_FILE_OPTION.getOpt());
dryRun = cmdLine.hasOption(DRY_RUN_OPTION.getOpt());
rebuild = cmdLine.hasOption(INDEX_REBUILD_OPTION.getOpt());
indexToolOpts = cmdLine.getOptionValue(INDEX_TOOL_OPTION.getOpt());
}
@VisibleForTesting
public void prepareToolSetup() {
try {
if (logFile != null) {
FileHandler fh = new FileHandler(logFile);
fh.setFormatter(new SimpleFormatter());
LOGGER.addHandler(fh);
}
prop.put(IndexUtil.INDEX_BUILDER_CONF_KEY, PhoenixIndexBuilder.class.getName());
prop.put(NonTxIndexBuilder.CODEC_CLASS_NAME_KEY, PhoenixIndexCodec.class.getName());
if (inputTables == null) {
inputTables = new String(Files.readAllBytes(Paths.get(inputFile)), StandardCharsets.UTF_8);
}
if (inputTables == null) {
LOGGER.severe("Tables' list is not available; use -tb or -f option");
}
LOGGER.info("list of tables passed: " + inputTables);
if (operation.equalsIgnoreCase(UPGRADE_OP)) {
upgrade = true;
} else if (operation.equalsIgnoreCase(ROLLBACK_OP)) {
upgrade = false;
} else {
throw new IllegalStateException("Invalid option provided for " + OPERATION_OPTION.getOpt()
+ " expected values: {upgrade, rollback}");
}
if (dryRun) {
LOGGER.info("This is the beginning of the tool with dry run.");
}
} catch (IOException e) {
LOGGER.severe("Something went wrong " + e);
System.exit(-1);
}
}
private static void setRpcRetriesAndTimeouts(Configuration conf) {
long indexRebuildQueryTimeoutMs = conf.getLong(QueryServices.INDEX_REBUILD_QUERY_TIMEOUT_ATTRIB,
QueryServicesOptions.DEFAULT_INDEX_REBUILD_QUERY_TIMEOUT);
long indexRebuildRPCTimeoutMs = conf.getLong(QueryServices.INDEX_REBUILD_RPC_TIMEOUT_ATTRIB,
QueryServicesOptions.DEFAULT_INDEX_REBUILD_RPC_TIMEOUT);
long indexRebuildClientScannerTimeOutMs =
conf.getLong(QueryServices.INDEX_REBUILD_CLIENT_SCANNER_TIMEOUT_ATTRIB,
QueryServicesOptions.DEFAULT_INDEX_REBUILD_CLIENT_SCANNER_TIMEOUT);
int indexRebuildRpcRetriesCounter = conf.getInt(QueryServices.INDEX_REBUILD_RPC_RETRIES_COUNTER,
QueryServicesOptions.DEFAULT_INDEX_REBUILD_RPC_RETRIES_COUNTER);
// Set phoenix and hbase level timeouts and rpc retries
conf.setLong(QueryServices.THREAD_TIMEOUT_MS_ATTRIB, indexRebuildQueryTimeoutMs);
conf.setLong(HConstants.HBASE_RPC_TIMEOUT_KEY, indexRebuildRPCTimeoutMs);
conf.setLong(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD,
indexRebuildClientScannerTimeOutMs);
conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, indexRebuildRpcRetriesCounter);
}
@VisibleForTesting
public static Connection getConnection(Configuration conf) throws SQLException {
setRpcRetriesAndTimeouts(conf);
return ConnectionUtil.getInputConnection(conf);
}
@VisibleForTesting
public int executeTool() {
Configuration conf = HBaseConfiguration.addHbaseResources(getConf());
try (Connection conn = getConnection(conf)) {
ConnectionQueryServices queryServices =
conn.unwrap(PhoenixConnection.class).getQueryServices();
boolean status = extractTablesAndIndexes(conn.unwrap(PhoenixConnection.class));
if (status) {
return executeTool(conn, queryServices, conf);
}
} catch (SQLException e) {
LOGGER.severe("Something went wrong in executing tool " + e);
}
return -1;
}
private int executeTool(Connection conn, ConnectionQueryServices queryServices,
Configuration conf) {
ArrayList<String> immutableList = new ArrayList<>();
ArrayList<String> mutableList = new ArrayList<>();
for (Map.Entry<String, HashSet<String>> entry : tablesAndIndexes.entrySet()) {
String dataTableFullName = entry.getKey();
try {
PTable dataTable = conn.unwrap(PhoenixConnection.class).getTableNoCache(dataTableFullName);
if (dataTable.isImmutableRows()) {
// add to list where immutable tables are processed in a different function
immutableList.add(dataTableFullName);
} else {
mutableList.add(dataTableFullName);
}
} catch (SQLException e) {
LOGGER
.severe("Something went wrong while getting the PTable " + dataTableFullName + " " + e);
return -1;
}
}
long startWaitTime = executeToolForImmutableTables(queryServices, immutableList);
executeToolForMutableTables(conn, queryServices, conf, mutableList);
enableImmutableTables(queryServices, immutableList, startWaitTime);
rebuildIndexes(conn, conf, immutableList);
if (hasFailure) {
return -1;
} else {
return 0;
}
}
private long executeToolForImmutableTables(ConnectionQueryServices queryServices,
List<String> immutableList) {
if (immutableList.isEmpty()) {
return 0;
}
LOGGER.info("Started " + operation + " for immutable tables");
List<String> failedTables = new ArrayList<String>();
for (String dataTableFullName : immutableList) {
try (Admin admin = queryServices.getAdmin()) {
HashSet<String> indexes = tablesAndIndexes.get(dataTableFullName);
LOGGER.info("Executing " + operation + " of " + dataTableFullName + " (immutable)");
disableTable(admin, dataTableFullName, indexes);
modifyTable(admin, dataTableFullName, indexes);
} catch (Throwable e) {
LOGGER
.severe("Something went wrong while disabling " + "or modifying immutable table " + e);
handleFailure(queryServices, dataTableFullName, immutableList, failedTables);
}
}
immutableList.removeAll(failedTables);
long startWaitTime = EnvironmentEdgeManager.currentTimeMillis();
return startWaitTime;
}
private void executeToolForMutableTables(Connection conn, ConnectionQueryServices queryServices,
Configuration conf, ArrayList<String> mutableTables) {
if (mutableTables.isEmpty()) {
return;
}
LOGGER.info("Started " + operation + " for mutable tables");
List<String> failedTables = new ArrayList<>();
for (String dataTableFullName : mutableTables) {
try (Admin admin = queryServices.getAdmin()) {
HashSet<String> indexes = tablesAndIndexes.get(dataTableFullName);
LOGGER.info("Executing " + operation + " of " + dataTableFullName);
disableTable(admin, dataTableFullName, indexes);
modifyTable(admin, dataTableFullName, indexes);
enableTable(admin, dataTableFullName, indexes);
LOGGER.info("Completed " + operation + " of " + dataTableFullName);
} catch (Throwable e) {
LOGGER.severe("Something went wrong while executing " + operation + " steps for "
+ dataTableFullName + " " + e);
handleFailure(queryServices, dataTableFullName, mutableTables, failedTables);
}
}
mutableTables.removeAll(failedTables);
// Opportunistically kick-off index rebuilds after upgrade operation
rebuildIndexes(conn, conf, mutableTables);
}
private void handleFailure(ConnectionQueryServices queryServices, String dataTableFullName,
List<String> tableList, List<String> failedTables) {
hasFailure = true;
LOGGER.info("Performing error handling to revert the steps taken during " + operation);
HashSet<String> indexes = tablesAndIndexes.get(dataTableFullName);
try (Admin admin = queryServices.getAdmin()) {
upgrade = !upgrade;
disableTable(admin, dataTableFullName, indexes);
modifyTable(admin, dataTableFullName, indexes);
enableTable(admin, dataTableFullName, indexes);
upgrade = !upgrade;
tablesAndIndexes.remove(dataTableFullName); // removing from the map
failedTables.add(dataTableFullName); // everything in failed tables will later be
// removed from the list
LOGGER.severe(dataTableFullName + " has been removed from the list as tool failed"
+ " to perform " + operation);
} catch (Throwable e) {
LOGGER.severe("Revert of the " + operation + " failed in error handling, "
+ "re-enabling tables and then throwing runtime exception");
LOGGER.severe("Confirm the state for " + getSubListString(tableList, dataTableFullName));
try (Admin admin = queryServices.getAdmin()) {
enableTable(admin, dataTableFullName, indexes);
} catch (Exception ex) {
throw new RuntimeException("Error re-enabling tables after rollback failure. "
+ "Original exception that caused the rollback: [" + e.toString() + " " + "]", ex);
}
throw new RuntimeException(e);
}
}
private void enableImmutableTables(ConnectionQueryServices queryServices,
ArrayList<String> immutableList, long startWaitTime) {
if (immutableList.isEmpty()) {
return;
}
while (true) {
long waitMore = getWaitMoreTime(startWaitTime);
if (waitMore <= 0) {
isWaitComplete = true;
break;
}
try {
// If the table is immutable, we need to wait for clients to purge
// their caches of table metadata
Thread.sleep(waitMore);
isWaitComplete = true;
} catch (InterruptedException e) {
LOGGER.warning("Sleep before starting index rebuild is interrupted. "
+ "Attempting to sleep again! " + e.getMessage());
}
}
for (String dataTableFullName : immutableList) {
try (Admin admin = queryServices.getAdmin()) {
HashSet<String> indexes = tablesAndIndexes.get(dataTableFullName);
enableTable(admin, dataTableFullName, indexes);
} catch (IOException | SQLException e) {
LOGGER.severe("Something went wrong while enabling immutable table " + e);
// removing to avoid any rebuilds after upgrade
tablesAndIndexes.remove(dataTableFullName);
immutableList.remove(dataTableFullName);
throw new RuntimeException(
"Manually enable the following tables "
+ getSubListString(immutableList, dataTableFullName) + " and run the index rebuild ",
e);
}
}
}
private String getSubListString(List<String> tableList, String dataTableFullName) {
return StringUtils.join(",",
tableList.subList(tableList.indexOf(dataTableFullName), tableList.size()));
}
private long getWaitMoreTime(long startWaitTime) {
int waitTime = GLOBAL_INDEX_CHECKER_ENABLED_MAP_EXPIRATION_MIN + 1;
long endWaitTime = EnvironmentEdgeManager.currentTimeMillis();
if (test || dryRun) {
return 0; // no wait
}
return (((waitTime) * 60000) - Math.abs(endWaitTime - startWaitTime));
}
private void disableTable(Admin admin, String dataTable, HashSet<String> indexes)
throws IOException {
if (admin.isTableEnabled(TableName.valueOf(dataTable))) {
if (!dryRun) {
admin.disableTable(TableName.valueOf(dataTable));
}
LOGGER.info("Disabled data table " + dataTable);
} else {
LOGGER.info("Data table " + dataTable + " is already disabled");
}
for (String indexName : indexes) {
if (admin.isTableEnabled(TableName.valueOf(indexName))) {
if (!dryRun) {
admin.disableTable(TableName.valueOf(indexName));
}
LOGGER.info("Disabled index table " + indexName);
} else {
LOGGER.info("Index table " + indexName + " is already disabled");
}
}
}
private void modifyTable(Admin admin, String dataTableFullName, HashSet<String> indexes)
throws IOException {
if (upgrade) {
modifyIndexTable(admin, indexes);
modifyDataTable(admin, dataTableFullName);
if (test && failUpgradeTask) {
throw new RuntimeException("Test requested upgrade failure");
}
} else {
modifyDataTable(admin, dataTableFullName);
modifyIndexTable(admin, indexes);
if (test && failDowngradeTask) {
throw new RuntimeException("Test requested downgrade failure");
}
}
}
private void enableTable(Admin admin, String dataTable, Set<String> indexes) throws IOException {
if (!admin.isTableEnabled(TableName.valueOf(dataTable))) {
if (!dryRun) {
admin.enableTable(TableName.valueOf(dataTable));
}
LOGGER.info("Enabled data table " + dataTable);
} else {
LOGGER.info("Data table " + dataTable + " is already enabled");
}
for (String indexName : indexes) {
if (!admin.isTableEnabled(TableName.valueOf(indexName))) {
if (!dryRun) {
admin.enableTable(TableName.valueOf(indexName));
}
LOGGER.info("Enabled index table " + indexName);
} else {
LOGGER.info("Index table " + indexName + " is already enabled");
}
}
}
private void rebuildIndexes(Connection conn, Configuration conf, ArrayList<String> tableList) {
if (!upgrade || !rebuild) {
return;
}
for (String table : tableList) {
rebuildIndexes(conn, conf, table);
}
}
private void rebuildIndexes(Connection conn, Configuration conf, String dataTableFullName) {
try {
HashMap<String, IndexInfo> rebuildMap = prepareToRebuildIndexes(conn, dataTableFullName);
// for rebuilding indexes in case of upgrade and if there are indexes on the table/view.
if (rebuildMap.isEmpty()) {
LOGGER.info("No indexes to rebuild for table " + dataTableFullName);
return;
}
if (!test) {
indexingTool = new IndexTool();
indexingTool.setConf(conf);
}
startIndexRebuilds(rebuildMap, indexingTool);
} catch (SQLException e) {
LOGGER.severe("Failed to prepare the map for index rebuilds " + e);
throw new RuntimeException("Failed to prepare the map for index rebuilds");
}
}
private void modifyDataTable(Admin admin, String tableName) throws IOException {
TableDescriptorBuilder tableDescBuilder =
TableDescriptorBuilder.newBuilder(admin.getDescriptor(TableName.valueOf(tableName)));
if (upgrade) {
removeCoprocessor(admin, tableName, tableDescBuilder, Indexer.class.getName());
addCoprocessor(admin, tableName, tableDescBuilder, IndexRegionObserver.class.getName());
} else {
removeCoprocessor(admin, tableName, tableDescBuilder, IndexRegionObserver.class.getName());
addCoprocessor(admin, tableName, tableDescBuilder, Indexer.class.getName());
}
if (!dryRun) {
admin.modifyTable(tableDescBuilder.build());
}
}
private void addCoprocessor(Admin admin, String tableName,
TableDescriptorBuilder tableDescBuilder, String coprocName) throws IOException {
addCoprocessor(admin, tableName, tableDescBuilder, coprocName,
QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY, prop);
}
private void addCoprocessor(Admin admin, String tableName,
TableDescriptorBuilder tableDescBuilder, String coprocName, int priority,
Map<String, String> propsToAdd) throws IOException {
if (!admin.getDescriptor(TableName.valueOf(tableName)).hasCoprocessor(coprocName)) {
if (!dryRun) {
CoprocessorDescriptorBuilder coprocBuilder =
CoprocessorDescriptorBuilder.newBuilder(coprocName);
coprocBuilder.setPriority(priority).setProperties(propsToAdd);
tableDescBuilder.setCoprocessor(coprocBuilder.build());
}
LOGGER.info("Loaded " + coprocName + " coprocessor on table " + tableName);
} else {
LOGGER.info(coprocName + " coprocessor on table " + tableName + "is already loaded");
}
}
private void removeCoprocessor(Admin admin, String tableName,
TableDescriptorBuilder tableDescBuilder, String coprocName) throws IOException {
if (admin.getDescriptor(TableName.valueOf(tableName)).hasCoprocessor(coprocName)) {
if (!dryRun) {
tableDescBuilder.removeCoprocessor(coprocName);
}
LOGGER.info("Unloaded " + coprocName + "coprocessor on table " + tableName);
} else {
LOGGER.info(coprocName + " coprocessor on table " + tableName + " is already unloaded");
}
}
private void modifyIndexTable(Admin admin, HashSet<String> indexes) throws IOException {
for (String indexName : indexes) {
TableDescriptorBuilder indexTableDescBuilder =
TableDescriptorBuilder.newBuilder(admin.getDescriptor(TableName.valueOf(indexName)));
if (upgrade) {
// GlobalIndexChecker needs to be a "lower" priority than all the others so that it
// goes first. It also doesn't get the codec props the IndexRegionObserver needs
addCoprocessor(admin, indexName, indexTableDescBuilder, GlobalIndexChecker.class.getName(),
QueryServicesOptions.DEFAULT_COPROCESSOR_PRIORITY - 1, emptyProp);
} else {
removeCoprocessor(admin, indexName, indexTableDescBuilder,
GlobalIndexChecker.class.getName());
}
if (!dryRun) {
admin.modifyTable(indexTableDescBuilder.build());
}
}
}
private int startIndexRebuilds(HashMap<String, IndexInfo> indexInfos, IndexTool indexingTool) {
for (Map.Entry<String, IndexInfo> entry : indexInfos.entrySet()) {
String index = entry.getKey();
IndexInfo indexInfo = entry.getValue();
String indexName = SchemaUtil.getTableNameFromFullName(index);
String tenantId = indexInfo.getTenantId();
String baseTable = indexInfo.getBaseTable();
String schema = indexInfo.getSchemaName();
String outFile = "/tmp/index_rebuild_" + schema + "_" + indexName
+ (GLOBAL_INDEX_ID.equals(tenantId) ? "" : "_" + tenantId) + "_"
+ UUID.randomUUID().toString();
String[] args = getIndexToolArgValues(schema, baseTable, indexName, outFile, tenantId);
try {
LOGGER.info("Rebuilding index: " + String.join(",", args));
if (!dryRun) {
indexingTool.run(args);
}
} catch (Exception e) {
LOGGER.severe("Something went wrong while building the index " + index + " " + e);
return -1;
}
}
return 0;
}
public String[] getIndexToolArgValues(String schema, String baseTable, String indexName,
String outFile, String tenantId) {
String args[] = { "-s", schema, "-dt", baseTable, "-it", indexName, "-direct", "-op", outFile };
ArrayList<String> list = new ArrayList<>(Arrays.asList(args));
if (!GLOBAL_INDEX_ID.equals(tenantId)) {
list.add("-tenant");
list.add(tenantId);
}
if (!Strings.isNullOrEmpty(indexToolOpts)) {
String[] options = indexToolOpts.split("\\s+");
for (String opt : options) {
list.add(opt);
}
}
return list.toArray(new String[list.size()]);
}
private boolean extractTablesAndIndexes(PhoenixConnection conn) {
String[] tables = inputTables.trim().split(",");
PTable dataTable = null;
try {
for (String tableName : tables) {
HashSet<String> physicalIndexes = new HashSet<>();
dataTable = conn.getTableNoCache(tableName);
String physicalTableName = dataTable.getPhysicalName().getString();
if (!dataTable.isTransactional() && dataTable.getType().equals(PTableType.TABLE)) {
for (PTable indexTable : dataTable.getIndexes()) {
if (IndexUtil.isGlobalIndex(indexTable)) {
String physicalIndexName = indexTable.getPhysicalName().getString();
physicalIndexes.add(physicalIndexName);
}
}
if (MetaDataUtil.hasViewIndexTable(conn, dataTable.getPhysicalName())) {
String viewIndexPhysicalName = MetaDataUtil.getViewIndexPhysicalName(physicalTableName);
physicalIndexes.add(viewIndexPhysicalName);
}
// for upgrade or rollback
tablesAndIndexes.put(physicalTableName, physicalIndexes);
} else {
LOGGER.info("Skipping Table " + tableName + " because it is "
+ (dataTable.isTransactional() ? "transactional" : "not a data table"));
}
}
return true;
} catch (SQLException e) {
LOGGER.severe("Failed to find list of indexes " + e);
if (dataTable == null) {
LOGGER.severe("Unable to find the provided data table");
}
return false;
}
}
private HashMap<String, IndexInfo> prepareToRebuildIndexes(Connection conn,
String dataTableFullName) throws SQLException {
HashMap<String, IndexInfo> indexInfos = new HashMap<>();
HashSet<String> physicalIndexes = tablesAndIndexes.get(dataTableFullName);
String viewIndexPhysicalName = MetaDataUtil.getViewIndexPhysicalName(dataTableFullName);
boolean hasViewIndex = physicalIndexes.contains(viewIndexPhysicalName);
String schemaName = SchemaUtil.getSchemaNameFromFullName(dataTableFullName);
String tableName = SchemaUtil.getTableNameFromFullName(dataTableFullName);
for (String physicalIndexName : physicalIndexes) {
if (physicalIndexName.equals(viewIndexPhysicalName)) {
continue;
}
String indexTableName = SchemaUtil.getTableNameFromFullName(physicalIndexName);
String pIndexName = SchemaUtil.getTableName(schemaName, indexTableName);
IndexInfo indexInfo = new IndexInfo(schemaName, tableName, GLOBAL_INDEX_ID, pIndexName);
indexInfos.put(physicalIndexName, indexInfo);
}
if (hasViewIndex) {
String viewSql = getViewSql(tableName, schemaName);
ResultSet rs = conn.createStatement().executeQuery(viewSql);
while (rs.next()) {
String viewFullName = rs.getString(1);
String viewName = SchemaUtil.getTableNameFromFullName(viewFullName);
String tenantId = rs.getString(2);
ArrayList<String> viewIndexes = findViewIndexes(conn, schemaName, viewName, tenantId);
for (String viewIndex : viewIndexes) {
IndexInfo indexInfo = new IndexInfo(schemaName, viewName,
tenantId == null ? GLOBAL_INDEX_ID : tenantId, viewIndex);
indexInfos.put(viewIndex, indexInfo);
}
}
}
return indexInfos;
}
@VisibleForTesting
public static String getViewSql(String tableName, String schemaName) {
// column_family has the view name and column_name has the Tenant ID
return "SELECT DISTINCT COLUMN_FAMILY, COLUMN_NAME FROM " + "SYSTEM.CHILD_LINK "
+ "WHERE TABLE_NAME = \'" + tableName + "\'"
+ (!Strings.isNullOrEmpty(schemaName) ? " AND TABLE_SCHEM = \'" + schemaName + "\'" : "")
+ " AND LINK_TYPE = " + PTable.LinkType.CHILD_TABLE.getSerializedValue();
}
private ArrayList<String> findViewIndexes(Connection conn, String schemaName, String viewName,
String tenantId) throws SQLException {
String viewIndexesSql = getViewIndexesSql(viewName, schemaName, tenantId);
ArrayList<String> viewIndexes = new ArrayList<>();
long stime = EnvironmentEdgeManager.currentTimeMillis();
ResultSet rs = conn.createStatement().executeQuery(viewIndexesSql);
long etime = EnvironmentEdgeManager.currentTimeMillis();
LOGGER.info(String.format("Query %s took %d ms ", viewIndexesSql, (etime - stime)));
while (rs.next()) {
String viewIndexName = rs.getString(1);
viewIndexes.add(viewIndexName);
}
return viewIndexes;
}
@VisibleForTesting
public static String getViewIndexesSql(String viewName, String schemaName, String tenantId) {
return "SELECT DISTINCT COLUMN_FAMILY FROM " + "SYSTEM.CATALOG " + "WHERE TABLE_NAME = \'"
+ viewName + "\'"
+ (!Strings.isNullOrEmpty(schemaName) ? " AND TABLE_SCHEM = \'" + schemaName + "\'" : "")
+ " AND LINK_TYPE = " + PTable.LinkType.INDEX_TABLE.getSerializedValue()
+ (tenantId != null ? " AND TENANT_ID = \'" + tenantId + "\'" : " AND TENANT_ID IS NULL");
}
private static class IndexInfo {
final private String schemaName;
final private String baseTable;
final private String tenantId;
final private String indexName;
public IndexInfo(String schemaName, String baseTable, String tenantId, String indexName) {
this.schemaName = schemaName;
this.baseTable = baseTable;
this.tenantId = tenantId;
this.indexName = indexName;
}
public String getSchemaName() {
return schemaName;
}
public String getBaseTable() {
return baseTable;
}
public String getTenantId() {
return tenantId;
}
public String getIndexName() {
return indexName;
}
}
public static void main(String[] args) throws Exception {
int result = ToolRunner.run(new IndexUpgradeTool(), args);
System.exit(result);
}
}
|
googleapis/google-cloud-java | 36,134 | java-shopping-merchant-inventories/proto-google-shopping-merchant-inventories-v1/src/main/java/com/google/shopping/merchant/inventories/v1/InsertRegionalInventoryRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/inventories/v1/regionalinventory.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.inventories.v1;
/**
*
*
* <pre>
* Request message for the `InsertRegionalInventory` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest}
*/
public final class InsertRegionalInventoryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest)
InsertRegionalInventoryRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use InsertRegionalInventoryRequest.newBuilder() to construct.
private InsertRegionalInventoryRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InsertRegionalInventoryRequest() {
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new InsertRegionalInventoryRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.inventories.v1.RegionalInventoryProto
.internal_static_google_shopping_merchant_inventories_v1_InsertRegionalInventoryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.inventories.v1.RegionalInventoryProto
.internal_static_google_shopping_merchant_inventories_v1_InsertRegionalInventoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest.class,
com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest.Builder
.class);
}
  // Bit mask of explicitly-set optional fields; bit 0x1 = regionalInventory (see hasRegionalInventory()).
  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a java.lang.String or a com.google.protobuf.ByteString;
  // UTF-8 decoding is deferred until getParent() is first called.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The account and product where this inventory will be inserted.
   * Format: `accounts/{account}/products/{product}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The account and product where this inventory will be inserted.
   * Format: `accounts/{account}/products/{product}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the ByteString form for subsequent byte-level access.
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REGIONAL_INVENTORY_FIELD_NUMBER = 2;

  // Nullable backing field; presence is tracked via bitField0_ bit 0x1.
  private com.google.shopping.merchant.inventories.v1.RegionalInventory regionalInventory_;

  /**
   *
   *
   * <pre>
   * Required. Regional inventory information to add to the product. If the
   * product already has a `RegionalInventory` resource for the same `region`,
   * full replacement of the `RegionalInventory` resource is performed.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the regionalInventory field is set.
   */
  @java.lang.Override
  public boolean hasRegionalInventory() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Regional inventory information to add to the product. If the
   * product already has a `RegionalInventory` resource for the same `region`,
   * full replacement of the `RegionalInventory` resource is performed.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The regionalInventory.
   */
  @java.lang.Override
  public com.google.shopping.merchant.inventories.v1.RegionalInventory getRegionalInventory() {
    // Never returns null: falls back to the default instance when unset.
    return regionalInventory_ == null
        ? com.google.shopping.merchant.inventories.v1.RegionalInventory.getDefaultInstance()
        : regionalInventory_;
  }

  /**
   *
   *
   * <pre>
   * Required. Regional inventory information to add to the product. If the
   * product already has a `RegionalInventory` resource for the same `region`,
   * full replacement of the `RegionalInventory` resource is performed.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.shopping.merchant.inventories.v1.RegionalInventoryOrBuilder
      getRegionalInventoryOrBuilder() {
    return regionalInventory_ == null
        ? com.google.shopping.merchant.inventories.v1.RegionalInventory.getDefaultInstance()
        : regionalInventory_;
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // Always initialized: this proto3 message has no required (proto2) fields.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order: parent (1), regional_inventory (2),
  // then any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getRegionalInventory());
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size; mirrors writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getRegionalInventory());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields, including presence of regionalInventory and
  // the preserved unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest other =
        (com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (hasRegionalInventory() != other.hasRegionalInventory()) return false;
    if (hasRegionalInventory()) {
      if (!getRegionalInventory().equals(other.getRegionalInventory())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals(); folds in the descriptor, each set
  // field keyed by its field number, and the unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasRegionalInventory()) {
      hash = (37 * hash) + REGIONAL_INVENTORY_FIELD_NUMBER;
      hash = (53 * hash) + getRegionalInventory().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER (optionally
  // with an extension registry) and differ only in the input-source type.
  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message body.
  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant merge when converting the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for the `InsertRegionalInventory` method.
   * </pre>
   *
   * Protobuf type {@code google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest)
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequestOrBuilder {
    // Returns the descriptor for the message type this builder produces.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.inventories.v1.RegionalInventoryProto
          .internal_static_google_shopping_merchant_inventories_v1_InsertRegionalInventoryRequest_descriptor;
    }

    // Maps descriptor fields to the generated accessors for reflective access.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.inventories.v1.RegionalInventoryProto
          .internal_static_google_shopping_merchant_inventories_v1_InsertRegionalInventoryRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest.class,
              com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest.Builder
                  .class);
    }

    // Construct using
    // com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-field builders when the runtime enables
    // alwaysUseFieldBuilders (needed so parent notifications propagate).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getRegionalInventoryFieldBuilder();
      }
    }

    // Resets every field to its default and releases the nested builder.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      regionalInventory_ = null;
      if (regionalInventoryBuilder_ != null) {
        regionalInventoryBuilder_.dispose();
        regionalInventoryBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.inventories.v1.RegionalInventoryProto
          .internal_static_google_shopping_merchant_inventories_v1_InsertRegionalInventoryRequest_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
          .getDefaultInstance();
    }

    // Builds the message, throwing if the result reports itself uninitialized.
    @java.lang.Override
    public com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest build() {
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds without the initialization check; set fields are copied in
    // buildPartial0 only when at least one bit is set.
    @java.lang.Override
    public com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
        buildPartial() {
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest result =
          new com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the result, translating the builder's bit mask
    // (0x1 = parent, 0x2 = regionalInventory) into the message's mask
    // (0x1 = regionalInventory).
    private void buildPartial0(
        com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.regionalInventory_ =
            regionalInventoryBuilder_ == null
                ? regionalInventory_
                : regionalInventoryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed mergeFrom when possible, otherwise falls back to
    // the reflection-based merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest) {
        return mergeFrom(
            (com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: a non-empty parent overwrites, and a set
    // regionalInventory is merged; unknown fields are always carried over.
    public Builder mergeFrom(
        com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest other) {
      if (other
          == com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
              .getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasRegionalInventory()) {
        mergeRegionalInventory(other.getRegionalInventory());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse: dispatches on each wire tag
    // (10 = field 1 "parent", 18 = field 2 "regional_inventory").
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getRegionalInventoryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always notify the parent even on failure, since fields may have
        // been partially populated before the exception.
        onChanged();
      } // finally
      return this;
    }

    // Builder-side bit mask: 0x1 = parent set, 0x2 = regionalInventory set.
    private int bitField0_;

    // Holds either a java.lang.String or a lazily-decoded ByteString.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The account and product where this inventory will be inserted.
     * Format: `accounts/{account}/products/{product}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded form.
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The account and product where this inventory will be inserted.
     * Format: `accounts/{account}/products/{product}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded form.
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The account and product where this inventory will be inserted.
     * Format: `accounts/{account}/products/{product}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The account and product where this inventory will be inserted.
     * Format: `accounts/{account}/products/{product}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The account and product where this inventory will be inserted.
     * Format: `accounts/{account}/products/{product}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    // Plain field used until a nested builder is requested; afterwards the
    // SingleFieldBuilderV3 below becomes the source of truth.
    private com.google.shopping.merchant.inventories.v1.RegionalInventory regionalInventory_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.inventories.v1.RegionalInventory,
            com.google.shopping.merchant.inventories.v1.RegionalInventory.Builder,
            com.google.shopping.merchant.inventories.v1.RegionalInventoryOrBuilder>
        regionalInventoryBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the regionalInventory field is set.
     */
    public boolean hasRegionalInventory() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The regionalInventory.
     */
    public com.google.shopping.merchant.inventories.v1.RegionalInventory getRegionalInventory() {
      if (regionalInventoryBuilder_ == null) {
        return regionalInventory_ == null
            ? com.google.shopping.merchant.inventories.v1.RegionalInventory.getDefaultInstance()
            : regionalInventory_;
      } else {
        return regionalInventoryBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setRegionalInventory(
        com.google.shopping.merchant.inventories.v1.RegionalInventory value) {
      if (regionalInventoryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        regionalInventory_ = value;
      } else {
        regionalInventoryBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setRegionalInventory(
        com.google.shopping.merchant.inventories.v1.RegionalInventory.Builder builderForValue) {
      if (regionalInventoryBuilder_ == null) {
        regionalInventory_ = builderForValue.build();
      } else {
        regionalInventoryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeRegionalInventory(
        com.google.shopping.merchant.inventories.v1.RegionalInventory value) {
      if (regionalInventoryBuilder_ == null) {
        // Merge into the existing value only if one was explicitly set and is
        // not the default instance; otherwise simply replace it.
        if (((bitField0_ & 0x00000002) != 0)
            && regionalInventory_ != null
            && regionalInventory_
                != com.google.shopping.merchant.inventories.v1.RegionalInventory
                    .getDefaultInstance()) {
          getRegionalInventoryBuilder().mergeFrom(value);
        } else {
          regionalInventory_ = value;
        }
      } else {
        regionalInventoryBuilder_.mergeFrom(value);
      }
      if (regionalInventory_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearRegionalInventory() {
      bitField0_ = (bitField0_ & ~0x00000002);
      regionalInventory_ = null;
      if (regionalInventoryBuilder_ != null) {
        regionalInventoryBuilder_.dispose();
        regionalInventoryBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.shopping.merchant.inventories.v1.RegionalInventory.Builder
        getRegionalInventoryBuilder() {
      // Requesting the builder marks the field as set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getRegionalInventoryFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.shopping.merchant.inventories.v1.RegionalInventoryOrBuilder
        getRegionalInventoryOrBuilder() {
      if (regionalInventoryBuilder_ != null) {
        return regionalInventoryBuilder_.getMessageOrBuilder();
      } else {
        return regionalInventory_ == null
            ? com.google.shopping.merchant.inventories.v1.RegionalInventory.getDefaultInstance()
            : regionalInventory_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Regional inventory information to add to the product. If the
     * product already has a `RegionalInventory` resource for the same `region`,
     * full replacement of the `RegionalInventory` resource is performed.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.inventories.v1.RegionalInventory regional_inventory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.inventories.v1.RegionalInventory,
            com.google.shopping.merchant.inventories.v1.RegionalInventory.Builder,
            com.google.shopping.merchant.inventories.v1.RegionalInventoryOrBuilder>
        getRegionalInventoryFieldBuilder() {
      // Lazily creates the nested builder; ownership of the current value moves
      // into the builder, so the plain field is nulled out afterwards.
      if (regionalInventoryBuilder_ == null) {
        regionalInventoryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.shopping.merchant.inventories.v1.RegionalInventory,
                com.google.shopping.merchant.inventories.v1.RegionalInventory.Builder,
                com.google.shopping.merchant.inventories.v1.RegionalInventoryOrBuilder>(
                getRegionalInventory(), getParentForChildren(), isClean());
        regionalInventory_ = null;
      }
      return regionalInventoryBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest)
  }
  // @@protoc_insertion_point(class_scope:google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest)
  // Singleton default (all-fields-unset) instance, created in the static initializer.
  private static final com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest();
  }

  public static com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegating to Builder.mergeFrom; on failure it attaches the
  // partially-built message via setUnfinishedMessage so callers can inspect it.
  private static final com.google.protobuf.Parser<InsertRegionalInventoryRequest> PARSER =
      new com.google.protobuf.AbstractParser<InsertRegionalInventoryRequest>() {
        @java.lang.Override
        public InsertRegionalInventoryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<InsertRegionalInventoryRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<InsertRegionalInventoryRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/juneau | 34,575 | juneau-rest/juneau-rest-server/src/main/java/org/apache/juneau/rest/annotation/RestOp.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.rest.annotation;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.*;
import java.lang.annotation.*;
import java.nio.charset.*;
import org.apache.juneau.*;
import org.apache.juneau.annotation.*;
import org.apache.juneau.bean.swagger.*;
import org.apache.juneau.encoders.*;
import org.apache.juneau.http.remote.*;
import org.apache.juneau.parser.*;
import org.apache.juneau.rest.*;
import org.apache.juneau.rest.converter.*;
import org.apache.juneau.rest.guard.*;
import org.apache.juneau.rest.httppart.*;
import org.apache.juneau.rest.matcher.*;
import org.apache.juneau.rest.servlet.*;
import org.apache.juneau.rest.swagger.*;
import org.apache.juneau.serializer.*;
/**
* Identifies a REST operation Java method on a {@link RestServlet} implementation class.
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/RestOpAnnotatedMethodBasics">@RestOp-Annotated Method Basics</a>
* </ul>
*/
@Target(METHOD)
@Retention(RUNTIME)
@Inherited
@ContextApply(RestOpAnnotation.RestOpContextApply.class)
@AnnotationGroup(RestOp.class)
public @interface RestOp {
/**
 * Specifies whether this method can be called based on the client version.
 *
 * <p>
 * The client version is identified via the HTTP request header identified by
 * {@link Rest#clientVersionHeader() @Rest(clientVersionHeader)} which by default is <js>"Client-Version"</js>.
 *
 * <p>
 * This is a specialized kind of {@link RestMatcher} that allows you to invoke different Java methods for the same
 * method/path based on the client version.
 *
 * <p>
 * The format of the client version range is similar to that of OSGi versions.
 *
 * <p>
 * In the following example, the Java methods are mapped to the same HTTP method and URL <js>"/foobar"</js>.
 * <p class='bjava'>
 * 	<jc>// Call this method if Client-Version is at least 2.0.
 * 	// Note that this also matches 2.0.1.</jc>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/foobar"</js>, clientVersion=<js>"2.0"</js>)
 * 	<jk>public</jk> Object method1() {...}
 *
 * 	<jc>// Call this method if Client-Version is at least 1.1, but less than 2.0.</jc>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/foobar"</js>, clientVersion=<js>"[1.1,2.0)"</js>)
 * 	<jk>public</jk> Object method2() {...}
 *
 * 	<jc>// Call this method if Client-Version is less than 1.1.</jc>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/foobar"</js>, clientVersion=<js>"[0,1.1)"</js>)
 * 	<jk>public</jk> Object method3() {...}
 * </p>
 *
 * <p>
 * It's common to combine the client version with transforms that will convert new POJOs into older POJOs for
 * backwards compatibility.
 * <p class='bjava'>
 * 	<jc>// Call this method if Client-Version is at least 2.0.</jc>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/foobar"</js>, clientVersion=<js>"2.0"</js>)
 * 	<jk>public</jk> NewPojo newMethod() {...}
 *
 * 	<jc>// Call this method if X-Client-Version is at least 1.1, but less than 2.0.</jc>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/foobar"</js>, clientVersion=<js>"[1.1,2.0)"</js>)
 * 	<ja>@BeanConfig</ja>(swaps=NewToOldSwap.<jk>class</jk>)
 * 	<jk>public</jk> NewPojo oldMethod() {
 * 		<jk>return</jk> newMethod();
 * 	}
 * </p>
 *
 * <p>
 * Note that in the previous example, we're returning the exact same POJO, but using a transform to convert it into
 * an older form.
 * The old method could also just return back a completely different object.
 *
 * <p>
 * The range can be any of the following:
 * <ul>
 * 	<li><js>"[0,1.0)"</js> = Less than 1.0. 1.0 and 1.0.0 does not match.
 * 	<li><js>"[0,1.0]"</js> = Less than or equal to 1.0. Note that 1.0.1 will match.
 * 	<li><js>"1.0"</js> = At least 1.0. 1.0 and 2.0 will match.
 * </ul>
 *
 * <h5 class='section'>See Also:</h5><ul>
 * 	<li class='jm'>{@link org.apache.juneau.rest.RestContext.Builder#clientVersionHeader(String)}
 * </ul>
 *
 * @return The annotation value.
 */
String clientVersion() default "";
/**
* Supported content media types.
*
* <p>
* Overrides the media types inferred from the parsers that identify what media types can be consumed by the resource.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$S{mySystemProperty}"</js>).
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#consumes(MediaType...)}
* </ul>
*
* @return The annotation value.
*/
String[] consumes() default {};
/**
* Class-level response converters.
*
* <p>
* Associates one or more {@link RestConverter converters} with this method.
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#converters()} - Registering converters with REST resources.
* </ul>
*
* @return The annotation value.
*/
Class<? extends RestConverter>[] converters() default {};
/**
* Enable debug mode.
*
* <p>
* Enables the following:
* <ul class='spaced-list'>
* <li>
* HTTP request/response bodies are cached in memory for logging purposes.
* <li>
* Request/response messages are automatically logged.
* </ul>
*
* <ul class='values'>
* <li><js>"true"</js> - Debug is enabled for all requests.
* <li><js>"false"</js> - Debug is disabled for all requests.
* <li><js>"conditional"</js> - Debug is enabled only for requests that have a <c class='snippet'>Debug: true</c> header.
* <li><js>""</js> (or anything else) - Debug mode is inherited from class.
* </ul>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$L{my.localized.variable}"</js>).
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestContext.Builder#debugEnablement()}
* </ul>
*
* @return The annotation value.
*/
String debug() default "";
/**
* Default <c>Accept</c> header.
*
* <p>
* The default value for the <c>Accept</c> header if not specified on a request.
*
* <p>
* This is a shortcut for using {@link #defaultRequestHeaders()} for just this specific header.
*
* @return The annotation value.
*/
String defaultAccept() default "";
/**
* Default character encoding.
*
* <p>
* The default character encoding for the request and response if not specified on the request.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$S{mySystemProperty}"</js>).
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestContext.Builder#defaultCharset(Charset)}
* <li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#defaultCharset(Charset)}
* <li class='ja'>{@link Rest#defaultCharset}
* </ul>
*
* @return The annotation value.
*/
String defaultCharset() default "";
/**
* Default <c>Content-Type</c> header.
*
* <p>
* The default value for the <c>Content-Type</c> header if not specified on a request.
*
* <p>
* This is a shortcut for using {@link #defaultRequestHeaders()} for just this specific header.
*
* @return The annotation value.
*/
String defaultContentType() default "";
/**
* Specifies default values for form-data parameters.
*
* <p>
* Strings are of the format <js>"name=value"</js>.
*
* <p>
* Affects values returned by {@link RestRequest#getFormParam(String)} when the parameter is not present on the
* request.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <ja>@RestOp</ja>(method=<jsf>POST</jsf>, path=<js>"/*"</js>, defaultRequestFormData={<js>"foo=bar"</js>})
* <jk>public</jk> String doPost(<ja>@FormData</ja>(<js>"foo"</js>) String <jv>foo</jv>) {...}
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* You can use either <js>':'</js> or <js>'='</js> as the key/value delimiter.
* <li class='note'>
* Key and value is trimmed of whitespace.
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$S{mySystemProperty}"</js>).
* </ul>
*
* @return The annotation value.
*/
String[] defaultRequestFormData() default {};
/**
* Specifies default values for query parameters.
*
* <p>
* Strings are of the format <js>"name=value"</js>.
*
* <p>
* Affects values returned by {@link RestRequest#getQueryParam(String)} when the parameter is not present on the request.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/*"</js>, defaultRequestQueryData={<js>"foo=bar"</js>})
* <jk>public</jk> String doGet(<ja>@Query</ja>(<js>"foo"</js>) String <jv>foo</jv>) {...}
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* You can use either <js>':'</js> or <js>'='</js> as the key/value delimiter.
* <li class='note'>
* Key and value is trimmed of whitespace.
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$S{mySystemProperty}"</js>).
* </ul>
*
* @return The annotation value.
*/
String[] defaultRequestQueryData() default {};
/**
 * Default request attributes.
 *
 * <p>
 * Specifies default values for request attributes if they're not already set on the request.
 *
 * <p>
 * Affects values returned by the following methods:
 * <ul>
 * 	<li class='jm'>{@link RestRequest#getAttribute(String)}.
 * 	<li class='jm'>{@link RestRequest#getAttributes()}.
 * </ul>
 *
 * <h5 class='section'>Example:</h5>
 * <p class='bjava'>
 * 	<jc>// Defined via annotation resolving to a config file setting with default value.</jc>
 * 	<ja>@Rest</ja>(defaultRequestAttributes={<js>"Foo=bar"</js>, <js>"Baz: $C{REST/myAttributeValue}"</js>})
 * 	<jk>public class</jk> MyResource {
 *
 * 		<jc>// Override at the method level.</jc>
 * 		<ja>@RestGet</ja>(defaultRequestAttributes={<js>"Foo: bar"</js>})
 * 		<jk>public</jk> Object myMethod() {...}
 * 	}
 * </p>
 *
 * <h5 class='section'>Notes:</h5><ul>
 * 	<li class='note'>
 * 		Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
 * 		(e.g. <js>"$L{my.localized.variable}"</js>).
 * </ul>
 *
 * <h5 class='section'>See Also:</h5><ul>
 * 	<li class='jm'>{@link org.apache.juneau.rest.RestContext.Builder#defaultRequestAttributes(NamedAttribute...)}
 * 	<li class='ja'>{@link Rest#defaultRequestAttributes()}
 * </ul>
 *
 * @return The annotation value.
 */
String[] defaultRequestAttributes() default {};
/**
* Default request headers.
*
* <p>
* Specifies default values for request headers if they're not passed in through the request.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Assume "text/json" Accept value when Accept not specified</jc>
* <ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/*"</js>, defaultRequestHeaders={<js>"Accept: text/json"</js>})
* <jk>public</jk> String doGet() {...}
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$S{mySystemProperty}"</js>).
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestContext.Builder#defaultRequestHeaders(org.apache.http.Header...)}
* </ul>
*
* @return The annotation value.
*/
String[] defaultRequestHeaders() default {};
/**
* Default response headers.
*
* <p>
* Specifies default values for response headers if they're not overwritten during the request.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Assume "text/json" Accept value when Accept not specified</jc>
* <ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/*"</js>, defaultResponseHeaders={<js>"Content-Type: text/json"</js>})
* <jk>public</jk> String doGet() {...}
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$S{mySystemProperty}"</js>).
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestContext.Builder#defaultResponseHeaders(org.apache.http.Header...)}
* </ul>
*
* @return The annotation value.
*/
String[] defaultResponseHeaders() default {};
/**
* Optional description for the exposed API.
*
* <p>
* This description is used in the following locations:
* <ul class='spaced-list'>
* <li>
* The value returned by {@link Operation#getDescription()} in the auto-generated swagger.
* <li>
* The <js>"$RS{operationDescription}"</js> variable.
* <li>
* The description of the method in the Swagger page.
* </ul>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Corresponds to the swagger field <c>/paths/{path}/{method}/description</c>.
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$L{my.localized.variable}"</js>).
* </ul>
*
* @return The annotation value.
*/
String[] description() default {};
/**
* Specifies the compression encoders for this method.
*
* <p>
* Encoders are used to enable various kinds of compression (e.g. <js>"gzip"</js>) on requests and responses.
*
* <p>
* This value overrides encoders specified at the class level using {@link Rest#encoders()}.
* The {@link org.apache.juneau.encoders.EncoderSet.Inherit} class can be used to include values from the parent class.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Define a REST resource that handles GZIP compression.</jc>
* <ja>@Rest</ja>(
* encoders={
* GzipEncoder.<jk>class</jk>
* }
* )
* <jk>public class</jk> MyResource {
*
* <jc>// Define a REST method that can also use a custom encoder.</jc>
* <ja>@RestOp</ja>(
* method=<jsf>GET</jsf>,
* encoders={
* EncoderSet.Inherit.<jk>class</jk>, MyEncoder.<jk>class</jk>
* }
* )
* <jk>public</jk> MyBean doGet() {
* ...
* }
* }
* </p>
*
* <p>
* The programmatic equivalent to this annotation is:
* <p class='bjava'>
* RestOpContext.Builder <jv>builder</jv> = RestOpContext.<jsm>create</jsm>(<jv>method</jv>,<jv>restContext</jv>);
* <jv>builder</jv>.getEncoders().set(<jv>classes</jv>);
* </p>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerEncoders">Encoders</a>
* </ul>
*
* @return The annotation value.
*/
Class<? extends Encoder>[] encoders() default {};
/**
* Method-level guards.
*
* <p>
* Associates one or more {@link RestGuard RestGuards} with this method.
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#guards()}
* </ul>
*
* @return The annotation value.
*/
Class<? extends RestGuard>[] guards() default {};
/**
 * Method matchers.
 *
 * <p>
 * Associates one or more {@link RestMatcher RestMatchers} with this method.
 *
 * <p>
 * Matchers are used to allow multiple Java methods to handle requests assigned to the same URL path pattern, but
 * differing based on some request attribute, such as a specific header value.
 *
 * <h5 class='section'>See Also:</h5><ul>
 * 	<li class='jac'>{@link RestMatcher}
 * </ul>
 *
 * @return The annotation value.
 */
Class<? extends RestMatcher>[] matchers() default {};
/**
 * The maximum allowed input size (in bytes) on HTTP requests.
 *
 * <p>
 * Useful for alleviating DoS attacks by throwing an exception when too much input is received instead of resulting
 * in out-of-memory errors which could affect system stability.
 *
 * <h5 class='section'>Example:</h5>
 * <p class='bjava'>
 * 	<ja>@RestOp</ja>(
 * 		maxInput=<js>"100M"</js>
 * 	)
 * </p>
 *
 * <h5 class='section'>Notes:</h5><ul>
 * 	<li class='note'>
 * 		Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
 * 		(e.g. <js>"$S{mySystemProperty}"</js>).
 * </ul>
 *
 * <h5 class='section'>See Also:</h5><ul>
 * 	<li class='jm'>{@link org.apache.juneau.rest.RestContext.Builder#maxInput(String)}
 * 	<li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#maxInput(String)}
 * 	<li class='ja'>{@link Rest#maxInput}
 * </ul>
 *
 * @return The annotation value.
 */
String maxInput() default "";
/**
 * REST method name.
 *
 * <p>
 * Typically <js>"GET"</js>, <js>"PUT"</js>, <js>"POST"</js>, <js>"DELETE"</js>, or <js>"OPTIONS"</js>.
 *
 * <p>
 * Method names are case-insensitive (always folded to upper-case).
 *
 * <p>
 * Note that you can use {@link org.apache.juneau.http.HttpMethod} for constant values.
 *
 * <p>
 * Note that you can also use {@link #value()} to specify the method name and path in shortened form.
 *
 * <p>
 * Besides the standard HTTP method names, the following can also be specified:
 * <ul class='spaced-list'>
 * 	<li>
 * 		<js>"*"</js>
 * 		- Denotes any method.
 * 		<br>Use this if you want to capture any HTTP methods in a single Java method.
 * 		<br>The {@link Method @Method} annotation and/or {@link RestRequest#getMethod()} method can be used to
 * 		distinguish the actual HTTP method name.
 * 	<li>
 * 		<js>""</js>
 * 		- Auto-detect.
 * 		<br>The method name is determined based on the Java method name.
 * 		<br>For example, if the method is <c>doPost(...)</c>, then the method name is automatically detected
 * 		as <js>"POST"</js>.
 * 		<br>Otherwise, defaults to <js>"GET"</js>.
 * 	<li>
 * 		<js>"RRPC"</js>
 * 		- Remote-proxy interface.
 * 		<br>This denotes a Java method that returns an object (usually an interface, often annotated with the
 * 		{@link Remote @Remote} annotation) to be used as a remote proxy using
 * 		<c>RestClient.getRemoteInterface(Class&lt;T&gt; interfaceClass, String url)</c>.
 * 		<br>This allows you to construct client-side interface proxies using REST as a transport medium.
 * 		<br>Conceptually, this is simply a fancy <c>POST</c> against the url <js>"/{path}/{javaMethodName}"</js>
 * 		where the arguments are marshalled from the client to the server as an HTTP content containing an array of
 * 		objects, passed to the method as arguments, and then the resulting object is marshalled back to the client.
 * 	<li>
 * 		Anything else
 * 		- Overloaded non-HTTP-standard names that are passed in through a <c>&amp;method=methodName</c> URL
 * 		parameter.
 * </ul>
 *
 * @return The annotation value.
 */
String method() default "";
/**
* Dynamically apply this annotation to the specified methods.
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/DynamicallyAppliedAnnotations">Dynamically Applied Annotations</a>
* </ul>
*
* @return The annotation value.
*/
String[] on() default {};
/**
* Specifies the parsers for converting HTTP request bodies into POJOs for this method.
*
* <p>
* Parsers are used to convert the content of HTTP requests into POJOs.
* <br>Any of the Juneau framework parsers can be used in this setting.
* <br>The parser selected is based on the request <c>Content-Type</c> header matched against the values returned by the following method
* using a best-match algorithm:
* <ul class='javatree'>
* <li class='jm'>{@link Parser#getMediaTypes()}
* </ul>
*
* <p>
* This value overrides parsers specified at the class level using {@link Rest#parsers()}.
* The {@link org.apache.juneau.parser.ParserSet.Inherit} class can be used to include values from the parent class.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Define a REST resource that can consume JSON and HTML.</jc>
* <ja>@Rest</ja>(
* parsers={
* JsonParser.<jk>class</jk>,
* HtmlParser.<jk>class</jk>
* }
* )
* <jk>public class</jk> MyResource {
*
* <jc>// Define a REST method that can also consume XML.</jc>
* <ja>@RestOp</ja>(
* method=<jsf>POST</jsf>,
* parsers={
* ParserSet.Inherit.<jk>class</jk>, XmlParser.<jk>class</jk>
* }
* )
* <jk>public void</jk> doPost(MyBean <jv>bean</jv>) {
* ...
* }
* }
* </p>
*
* <p>
* The programmatic equivalent to this annotation is:
* <p class='bjava'>
* RestOpContext.Builder <jv>builder</jv> = RestOpContext.<jsm>create</jsm>(<jv>method</jv>,<jv>restContext</jv>);
* <jv>builder</jv>.getParsers().set(<jv>classes</jv>);
* </p>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/Marshalling">Marshalling</a>
* </ul>
*
* @return The annotation value.
*/
Class<?>[] parsers() default {};
/**
 * Optional path pattern for the specified method.
 *
 * <p>
 * Appending <js>"/*"</js> to the end of the path pattern will make it match any remainder too.
 * <br>Not appending <js>"/*"</js> to the end of the pattern will cause a 404 (Not found) error to occur if the exact
 * pattern is not found.
 *
 * <p>
 * The path can contain variables that get resolved to {@link org.apache.juneau.http.annotation.Path @Path} parameters.
 *
 * <h5 class='figure'>Examples:</h5>
 * <p class='bjava'>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/myurl/{foo}/{bar}/{baz}/*"</js>)
 * </p>
 * <p class='bjava'>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>, path=<js>"/myurl/{0}/{1}/{2}/*"</js>)
 * </p>
 *
 * <p>
 * If you do not specify a path name, then the path name is inferred from the Java method name.
 *
 * <h5 class='figure'>Example:</h5>
 * <p class='bjava'>
 * 	<jc>// Path is assumed to be "/foo".</jc>
 * 	<ja>@RestOp</ja>(method=<jsf>GET</jsf>)
 * 	<jk>public void</jk> foo() {...}
 * </p>
 *
 * <p>
 * If you also do not specify the {@link #method()} and the Java method name starts with <js>"get"</js>, <js>"put"</js>, <js>"post"</js>, or <js>"delete"</js>,
 * then the HTTP method name is stripped from the inferred path.
 *
 * <h5 class='figure'>Examples:</h5>
 * <p class='bjava'>
 * 	<jc>// Method is GET, path is "/foo".</jc>
 * 	<ja>@RestOp</ja>
 * 	<jk>public void</jk> getFoo() {...}
 * </p>
 * <p class='bjava'>
 * 	<jc>// Method is DELETE, path is "/bar".</jc>
 * 	<ja>@RestOp</ja>
 * 	<jk>public void</jk> deleteBar() {...}
 * </p>
 * <p class='bjava'>
 * 	<jc>// Method is GET, path is "/foobar".</jc>
 * 	<ja>@RestOp</ja>
 * 	<jk>public void</jk> foobar() {...}
 * </p>
 * <p class='bjava'>
 * 	<jc>// Method is GET, path is "/".</jc>
 * 	<ja>@RestOp</ja>
 * 	<jk>public void</jk> get() {...}
 * </p>
 *
 * <p>
 * Note that you can also use {@link #value()} to specify the method name and path in shortened form.
 *
 * <h5 class='section'>See Also:</h5><ul>
 * 	<li class='ja'>{@link org.apache.juneau.http.annotation.Path}
 * </ul>
 *
 * @return The annotation value.
 */
String[] path() default {};
/**
* Supported accept media types.
*
* <p>
* Overrides the media types inferred from the serializers that identify what media types can be produced by the resource.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$S{mySystemProperty}"</js>).
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#produces(MediaType...)}
* </ul>
*
* @return The annotation value.
*/
String[] produces() default {};
/**
* Role guard.
*
* <p>
* An expression defining if a user with the specified roles are allowed to access this method.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jk>public class</jk> MyResource <jk>extends</jk> BasicRestServlet {
*
* <ja>@RestOp</ja>(
* method=<jsf>GET</jsf>,
* path=<js>"/foo"</js>,
* roleGuard=<js>"ROLE_ADMIN || (ROLE_READ_WRITE && ROLE_SPECIAL)"</js>
* )
* <jk>public</jk> Object doGet() {
* }
* }
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Supports any of the following expression constructs:
* <ul>
* <li><js>"foo"</js> - Single arguments.
* <li><js>"foo,bar,baz"</js> - Multiple OR'ed arguments.
* <li><js>"foo | bar | baz"</js> - Multiple OR'ed arguments, pipe syntax.
* <li><js>"foo || bar || baz"</js> - Multiple OR'ed arguments, Java-OR syntax.
* <li><js>"fo*"</js> - Patterns including <js>'*'</js> and <js>'?'</js>.
* <li><js>"fo* & *oo"</js> - Multiple AND'ed arguments, ampersand syntax.
* <li><js>"fo* && *oo"</js> - Multiple AND'ed arguments, Java-AND syntax.
* <li><js>"fo* || (*oo || bar)"</js> - Parenthesis.
* </ul>
* <li class='note'>
* AND operations take precedence over OR operations (as expected).
* <li class='note'>
* Whitespace is ignored.
* <li class='note'>
* <jk>null</jk> or empty expressions always match as <jk>false</jk>.
* <li class='note'>
* If patterns are used, you must specify the list of declared roles using {@link #rolesDeclared()} or {@link org.apache.juneau.rest.RestOpContext.Builder#rolesDeclared(String...)}.
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$L{my.localized.variable}"</js>).
* <li class='note'>
* When defined on parent/child classes and methods, ALL guards within the hierarchy must pass.
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#roleGuard(String)}
* </ul>
*
* @return The annotation value.
*/
String roleGuard() default "";
/**
 * Declared roles.
 *
 * <p>
 * A comma-delimited list of all possible user roles.
 *
 * <p>
 * Used in conjunction with {@link #roleGuard()} when patterns are used.
 *
 * <h5 class='section'>Example:</h5>
 * <p class='bjava'>
 * 	<jk>public class</jk> MyResource <jk>extends</jk> BasicRestServlet {
 *
 * 		<ja>@RestOp</ja>(
 * 			method=<jsf>GET</jsf>,
 * 			path=<js>"/foo"</js>,
 * 			rolesDeclared=<js>"ROLE_ADMIN,ROLE_READ_WRITE,ROLE_READ_ONLY,ROLE_SPECIAL"</js>,
 * 			roleGuard=<js>"ROLE_ADMIN || (ROLE_READ_WRITE && ROLE_SPECIAL)"</js>
 * 		)
 * 		<jk>public</jk> Object doGet() {
 * 		}
 * 	}
 * </p>
 *
 * <h5 class='section'>See Also:</h5><ul>
 * 	<li class='jm'>{@link org.apache.juneau.rest.RestOpContext.Builder#rolesDeclared(String...)}
 * </ul>
 *
 * @return The annotation value.
 */
String rolesDeclared() default "";
/**
 * Specifies the serializers for marshalling POJOs into response bodies for this method.
 *
 * <p>
 * Serializers are used to convert POJOs to HTTP response bodies.
 * <br>Any of the Juneau framework serializers can be used in this setting.
 * <br>The serializer selected is based on the request <c>Accept</c> header matched against the values returned by the following method
 * using a best-match algorithm:
 * <ul class='javatree'>
 * 	<li class='jm'>{@link Serializer#getMediaTypeRanges()}
 * </ul>
 *
 * <p>
 * This value overrides serializers specified at the class level using {@link Rest#serializers()}.
 * The {@link org.apache.juneau.serializer.SerializerSet.Inherit} class can be used to include values from the parent class.
 *
 * <h5 class='section'>Example:</h5>
 * <p class='bjava'>
 * 	<jc>// Define a REST resource that can produce JSON and HTML.</jc>
 * 	<ja>@Rest</ja>(
 * 		serializers={
 * 			JsonSerializer.<jk>class</jk>,
 * 			HtmlSerializer.<jk>class</jk>
 * 		}
 * 	)
 * 	<jk>public class</jk> MyResource {
 *
 * 		<jc>// Define a REST method that can also produce XML.</jc>
 * 		<ja>@RestOp</ja>(
 * 			method=<jsf>POST</jsf>,
 * 			serializers={
 * 				SerializerSet.Inherit.<jk>class</jk>, XmlSerializer.<jk>class</jk>
 * 			}
 * 		)
 * 		<jk>public</jk> MyBean doPost(MyBean <jv>bean</jv>) {
 * 			...
 * 		}
 * 	}
 * </p>
 *
 * <p>
 * The programmatic equivalent to this annotation is:
 * <p class='bjava'>
 * 	RestOpContext.Builder <jv>builder</jv> = RestOpContext.<jsm>create</jsm>(<jv>method</jv>,<jv>restContext</jv>);
 * 	<jv>builder</jv>.getSerializers().set(<jv>classes</jv>);
 * </p>
 *
 * <h5 class='section'>See Also:</h5><ul>
 * 	<li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/Marshalling">Marshalling</a>
 * </ul>
 *
 * @return The annotation value.
 */
Class<? extends Serializer>[] serializers() default {};
/**
* Optional summary for the exposed API.
*
* <p>
* This summary is used in the following locations:
* <ul class='spaced-list'>
* <li>
* The value returned by {@link Operation#getSummary()} in the auto-generated swagger.
* <li>
* The <js>"$RS{operationSummary}"</js> variable.
* <li>
* The summary of the method in the Swagger page.
* </ul>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* Corresponds to the swagger field <c>/paths/{path}/{method}/summary</c>.
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$L{my.localized.variable}"</js>).
* </ul>
*
* @return The annotation value.
*/
String summary() default "";
/**
* Provides swagger-specific metadata on this method.
*
* <p>
* Used to populate the auto-generated OPTIONS swagger documentation.
*
* <p>
* The format of this annotation is JSON when all individual parts are concatenated.
* <br>The starting and ending <js>'{'</js>/<js>'}'</js> characters around the entire value are optional.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <ja>@RestOp</ja>(
* method=<jsf>PUT</jsf>,
* path=<js>"/{propertyName}"</js>,
*
* <jc>// Swagger info.</jc>
* swagger={
* <js>"parameters:["</js>,
* <js>"{name:'propertyName',in:'path',description:'The system property name.'},"</js>,
* <js>"{in:'body',description:'The new system property value.'}"</js>,
* <js>"],"</js>,
* <js>"responses:{"</js>,
* <js>"302: {headers:{Location:{description:'The root URL of this resource.'}}},"</js>,
* <js>"403: {description:'User is not an admin.'}"</js>,
* <js>"}"</js>
* }
* )
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* The format is <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauBeanSwagger2">juneau-bean-swagger-v2</a>.
* <br>Multiple lines are concatenated with newlines.
* <li class='note'>
* The starting and ending <js>'{'</js>/<js>'}'</js> characters around the entire value are optional.
* <li class='note'>
* These values are superimposed on top of any Swagger JSON file present for the resource in the classpath.
* <li class='note'>
* Supports <a class="doclink" href="https://juneau.apache.org/docs/topics/RestServerSvlVariables">SVL Variables</a>
* (e.g. <js>"$L{my.localized.variable}"</js>).
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='ja'>{@link OpSwagger}
* <li class='jc'>{@link SwaggerProvider}
* </ul>
*
* @return The annotation value.
*/
OpSwagger swagger() default @OpSwagger;
/**
* REST method name and path.
*
* <p>
* Can be used to provide a shortened combined form for the {@link #method()} and {@link #path()} values.
*
* <p>
* The following examples are considered equivalent.
* <p class='bjava'>
* <jc>// Normal form</jc>
* <ja>@RestOp</ja>(method=<jsf>PUT</jsf>, path=<js>"/{propertyName}"</js>)
*
* <jc>// Shortened form</jc>
* <ja>@RestOp</ja>(<js>"PUT /{propertyName}"</js>)
* </p>
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>
* The path portion is optional.
* </ul>
*
* @return The annotation value.
*/
String value() default "";
}
|
googleapis/google-cloud-java | 36,036 | java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/QueryTimeSeriesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/metric_service.proto
// Protobuf Java Version: 3.25.8
package com.google.monitoring.v3;
/**
*
*
* <pre>
* The `QueryTimeSeries` request. For information about the status of
* Monitoring Query Language (MQL), see the [MQL deprecation
* notice](https://cloud.google.com/stackdriver/docs/deprecations/mql).
* </pre>
*
* Protobuf type {@code google.monitoring.v3.QueryTimeSeriesRequest}
*/
@java.lang.Deprecated
public final class QueryTimeSeriesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.QueryTimeSeriesRequest)
QueryTimeSeriesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use QueryTimeSeriesRequest.newBuilder() to construct.
private QueryTimeSeriesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private QueryTimeSeriesRequest() {
name_ = "";
query_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new QueryTimeSeriesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_QueryTimeSeriesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_QueryTimeSeriesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.QueryTimeSeriesRequest.class,
com.google.monitoring.v3.QueryTimeSeriesRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) on which
* to execute the request. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The name.
*/
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: the field still holds the raw ByteString.
      // Decode once and cache the String (benign race — decoding is idempotent).
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) on which
* to execute the request. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for name.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Mirror of getName(): encode the cached String once and keep the
      // ByteString so repeated calls are cheap.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int QUERY_FIELD_NUMBER = 7;
@SuppressWarnings("serial")
private volatile java.lang.Object query_ = "";
/**
*
*
* <pre>
* Required. The query in the [Monitoring Query
* Language](https://cloud.google.com/monitoring/mql/reference) format.
* The default time zone is in UTC.
* </pre>
*
* <code>string query = 7 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The query.
*/
  @java.lang.Override
  public java.lang.String getQuery() {
    java.lang.Object ref = query_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazy UTF-8 decode of the parsed ByteString; result is cached in-place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      query_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The query in the [Monitoring Query
* Language](https://cloud.google.com/monitoring/mql/reference) format.
* The default time zone is in UTC.
* </pre>
*
* <code>string query = 7 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for query.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getQueryBytes() {
    java.lang.Object ref = query_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once; subsequent calls return the ByteString directly.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      query_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int PAGE_SIZE_FIELD_NUMBER = 9;
private int pageSize_ = 0;
/**
*
*
* <pre>
* A positive number that is the maximum number of time_series_data to return.
* </pre>
*
* <code>int32 page_size = 9;</code>
*
* @return The pageSize.
*/
  @java.lang.Override
  public int getPageSize() {
    // Primitive int field; no lazy decoding needed (0 is the proto3 default).
    return pageSize_;
  }
public static final int PAGE_TOKEN_FIELD_NUMBER = 10;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return additional results from the previous method call.
* </pre>
*
* <code>string page_token = 10;</code>
*
* @return The pageToken.
*/
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazy UTF-8 decode of the parsed ByteString; result is cached in-place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return additional results from the previous method call.
* </pre>
*
* <code>string page_token = 10;</code>
*
* @return The bytes for pageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once; subsequent calls return the ByteString directly.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memo states: -1 = not yet checked, 0 = false, 1 = true. This message has
    // no required fields, so it is always initialized once checked.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields holding their default value (empty string, 0)
    // are omitted entirely.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 7, query_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(9, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 10, pageToken_);
    }
    // Round-trip any fields this generated class does not know about.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 marks "not yet computed". Must mirror writeTo() exactly:
    // default-valued fields contribute no bytes.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, query_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(9, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.monitoring.v3.QueryTimeSeriesRequest)) {
      return super.equals(obj);
    }
    com.google.monitoring.v3.QueryTimeSeriesRequest other =
        (com.google.monitoring.v3.QueryTimeSeriesRequest) obj;
    // Field-by-field comparison including unknown fields, so two messages are
    // equal iff their serialized forms are equivalent.
    if (!getName().equals(other.getName())) return false;
    if (!getQuery().equals(other.getQuery())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized after first computation (0 means "not yet computed"; the mixing
    // scheme below cannot legitimately produce 0 as a final value here).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated hash: mix the descriptor, then each field tagged by
    // its field number, then the unknown-field set — consistent with equals().
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + QUERY_FIELD_NUMBER;
    hash = (53 * hash) + getQuery().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parsing entry points. Every overload delegates to PARSER; the
  // stream-based variants route through parseWithIOException /
  // parseDelimitedWithIOException so protobuf parse failures surface as
  // InvalidProtocolBufferException while genuine I/O errors propagate as-is.
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(com.google.monitoring.v3.QueryTimeSeriesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge for the default instance — nothing to copy.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The `QueryTimeSeries` request. For information about the status of
* Monitoring Query Language (MQL), see the [MQL deprecation
* notice](https://cloud.google.com/stackdriver/docs/deprecations/mql).
* </pre>
*
* Protobuf type {@code google.monitoring.v3.QueryTimeSeriesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.QueryTimeSeriesRequest)
com.google.monitoring.v3.QueryTimeSeriesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_QueryTimeSeriesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_QueryTimeSeriesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.QueryTimeSeriesRequest.class,
com.google.monitoring.v3.QueryTimeSeriesRequest.Builder.class);
}
// Construct using com.google.monitoring.v3.QueryTimeSeriesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
query_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_QueryTimeSeriesRequest_descriptor;
}
@java.lang.Override
public com.google.monitoring.v3.QueryTimeSeriesRequest getDefaultInstanceForType() {
return com.google.monitoring.v3.QueryTimeSeriesRequest.getDefaultInstance();
}
    @java.lang.Override
    public com.google.monitoring.v3.QueryTimeSeriesRequest build() {
      com.google.monitoring.v3.QueryTimeSeriesRequest result = buildPartial();
      // Proto3 messages without required fields are always initialized, so this
      // check is effectively a no-op kept for API uniformity.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.monitoring.v3.QueryTimeSeriesRequest buildPartial() {
      com.google.monitoring.v3.QueryTimeSeriesRequest result =
          new com.google.monitoring.v3.QueryTimeSeriesRequest(this);
      // bitField0_ tracks which fields were explicitly set on this builder;
      // only those are copied into the new message.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each explicitly-set field (per the bit mask) into the result.
    private void buildPartial0(com.google.monitoring.v3.QueryTimeSeriesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.query_ = query_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.QueryTimeSeriesRequest) {
return mergeFrom((com.google.monitoring.v3.QueryTimeSeriesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges a message of the same type: non-default fields from `other` win;
    // default-valued fields leave the builder's current values untouched.
    public Builder mergeFrom(com.google.monitoring.v3.QueryTimeSeriesRequest other) {
      if (other == com.google.monitoring.v3.QueryTimeSeriesRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getQuery().isEmpty()) {
        query_ = other.query_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tag = (field_number << 3) | wire_type; 0 means end of stream.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (name), length-delimited string
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 58: // field 7 (query), length-delimited string
              {
                query_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 58
            case 72: // field 9 (page_size), varint
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 72
            case 82: // field 10 (page_token), length-delimited string
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 82
            default:
              {
                // Unknown fields are preserved for round-tripping; false means
                // the tag terminated a group, which ends this message.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Even on failure, notify listeners of any fields merged so far.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) on which
* to execute the request. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) on which
* to execute the request. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) on which
* to execute the request. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) on which
* to execute the request. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) on which
* to execute the request. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object query_ = "";
/**
*
*
* <pre>
* Required. The query in the [Monitoring Query
* Language](https://cloud.google.com/monitoring/mql/reference) format.
* The default time zone is in UTC.
* </pre>
*
* <code>string query = 7 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The query.
*/
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The query in the [Monitoring Query
* Language](https://cloud.google.com/monitoring/mql/reference) format.
* The default time zone is in UTC.
* </pre>
*
* <code>string query = 7 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for query.
*/
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The query in the [Monitoring Query
* Language](https://cloud.google.com/monitoring/mql/reference) format.
* The default time zone is in UTC.
* </pre>
*
* <code>string query = 7 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The query to set.
* @return This builder for chaining.
*/
public Builder setQuery(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query in the [Monitoring Query
* Language](https://cloud.google.com/monitoring/mql/reference) format.
* The default time zone is in UTC.
* </pre>
*
* <code>string query = 7 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearQuery() {
query_ = getDefaultInstance().getQuery();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query in the [Monitoring Query
* Language](https://cloud.google.com/monitoring/mql/reference) format.
* The default time zone is in UTC.
* </pre>
*
* <code>string query = 7 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for query to set.
* @return This builder for chaining.
*/
public Builder setQueryBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* A positive number that is the maximum number of time_series_data to return.
* </pre>
*
* <code>int32 page_size = 9;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* A positive number that is the maximum number of time_series_data to return.
* </pre>
*
* <code>int32 page_size = 9;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A positive number that is the maximum number of time_series_data to return.
* </pre>
*
* <code>int32 page_size = 9;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return additional results from the previous method call.
* </pre>
*
* <code>string page_token = 10;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return additional results from the previous method call.
* </pre>
*
* <code>string page_token = 10;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return additional results from the previous method call.
* </pre>
*
* <code>string page_token = 10;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return additional results from the previous method call.
* </pre>
*
* <code>string page_token = 10;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return additional results from the previous method call.
* </pre>
*
* <code>string page_token = 10;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.QueryTimeSeriesRequest)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.QueryTimeSeriesRequest)
  // Singleton default instance; shared by getDefaultInstance() and used as the
  // sentinel in toBuilder()/mergeFrom().
  private static final com.google.monitoring.v3.QueryTimeSeriesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.monitoring.v3.QueryTimeSeriesRequest();
  }
  public static com.google.monitoring.v3.QueryTimeSeriesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: delegates to Builder.mergeFrom(CodedInputStream) and
  // attaches the partially-built message to any parse exception so callers can
  // inspect what was read before the failure.
  private static final com.google.protobuf.Parser<QueryTimeSeriesRequest> PARSER =
      new com.google.protobuf.AbstractParser<QueryTimeSeriesRequest>() {
        @java.lang.Override
        public QueryTimeSeriesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<QueryTimeSeriesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<QueryTimeSeriesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.monitoring.v3.QueryTimeSeriesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.json;
import static org.apache.juneau.collections.JsonMap.*;
import java.lang.annotation.*;
import java.nio.charset.*;
import java.util.*;
import java.util.concurrent.*;
import org.apache.juneau.*;
import org.apache.juneau.annotation.*;
import org.apache.juneau.collections.*;
import org.apache.juneau.internal.*;
import org.apache.juneau.jsonschema.*;
import org.apache.juneau.utils.*;
/**
* Serializes POJO metadata to HTTP responses as JSON-Schema.
*
* <h5 class='topic'>Media types</h5>
*
* Handles <c>Accept</c> types: <bc>application/json+schema, text/json+schema</bc>
* <p>
* Produces <c>Content-Type</c> types: <bc>application/json</bc>
*
* <h5 class='topic'>Description</h5>
*
* Produces the JSON-schema for the JSON produced by the {@link JsonSerializer} class with the same properties.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>This class is thread safe and reusable.
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/JsonBasics">JSON Basics</a>
* </ul>
*/
public class JsonSchemaSerializer extends JsonSerializer implements JsonSchemaMetaProvider {
//-------------------------------------------------------------------------------------------------------------------
// Static
//-------------------------------------------------------------------------------------------------------------------
/** Default serializer, all default settings.*/
public static final JsonSchemaSerializer DEFAULT = new JsonSchemaSerializer(create());
/** Default serializer, all default settings.*/
public static final JsonSchemaSerializer DEFAULT_READABLE = new Readable(create());
/** Default serializer, single quotes, simple mode. */
public static final JsonSchemaSerializer DEFAULT_SIMPLE = new Simple(create());
/** Default serializer, single quotes, simple mode, with whitespace. */
public static final JsonSchemaSerializer DEFAULT_SIMPLE_READABLE = new SimpleReadable(create());
/**
* Creates a new builder for this object.
*
* @return A new builder.
*/
	public static Builder create() {
		// Each call returns a fresh, independently-mutable builder.
		return new Builder();
	}
//-------------------------------------------------------------------------------------------------------------------
// Static subclasses
//-------------------------------------------------------------------------------------------------------------------
/** Default serializer, with whitespace. */
	public static class Readable extends JsonSchemaSerializer {

		/**
		 * Constructor.
		 *
		 * <p>Enables whitespace on the builder before construction, so output is
		 * indented for human readability.
		 *
		 * @param builder The builder for this object.
		 */
		public Readable(Builder builder) {
			super(builder.useWhitespace());
		}
	}
/** Default serializer, single quotes, simple mode. */
public static class Simple extends JsonSchemaSerializer {
/**
 * Constructor.
 *
 * <p>
 * Enables simple attributes (unquoted attribute names where possible) and single-quoted strings.
 *
 * @param builder The builder for this object.
 */
public Simple(Builder builder) {
super(builder.simpleAttrs().quoteChar('\''));
}
}
/** Default serializer, single quotes, simple mode, with whitespace. */
public static class SimpleReadable extends JsonSchemaSerializer {
/**
 * Constructor.
 *
 * <p>
 * Combines the settings of {@link Simple} (simple attributes, single quotes) with whitespace
 * for human-readable output.
 *
 * @param builder The builder for this object.
 */
public SimpleReadable(Builder builder) {
super(builder.simpleAttrs().quoteChar('\'').useWhitespace());
}
}
//-------------------------------------------------------------------------------------------------------------------
// Builder
//-------------------------------------------------------------------------------------------------------------------
/**
* Builder class.
*/
@FluentSetters
public static class Builder extends JsonSerializer.Builder {
private static final Cache<HashKey,JsonSchemaSerializer> CACHE = Cache.of(HashKey.class, JsonSchemaSerializer.class).build();
JsonSchemaGenerator.Builder generatorBuilder;
/**
 * Constructor, default settings.
 */
protected Builder() {
// Output is plain JSON, but this serializer matches the schema-specific Accept types.
produces("application/json");
accept("application/json+schema,text/json+schema");
// The schema generator shares this serializer's bean context so that type metadata is consistent.
generatorBuilder = JsonSchemaGenerator.create().beanContext(beanContext());
}
/**
 * Copy constructor.
 *
 * @param copyFrom The bean to copy from.
 */
protected Builder(JsonSchemaSerializer copyFrom) {
super(copyFrom);
// Copy the generator settings from the built serializer, re-binding them to this builder's bean context.
generatorBuilder = copyFrom.generator.copy().beanContext(beanContext());
}
/**
 * Copy constructor.
 *
 * @param copyFrom The builder to copy from.
 */
protected Builder(Builder copyFrom) {
super(copyFrom);
// Deep-copy the generator builder so changes to this builder don't affect the one copied from.
generatorBuilder = copyFrom.generatorBuilder.copy().beanContext(beanContext());
}
@Override /* Context.Builder */
public Builder copy() {
// Returns an independent copy of this builder (including the nested generator builder).
return new Builder(this);
}
@Override /* Context.Builder */
public JsonSchemaSerializer build() {
// Built instances are cached and shared by hash key, so identical settings reuse the same serializer.
return cache(CACHE).build(JsonSchemaSerializer.class);
}
@Override /* Context.Builder */
public HashKey hashKey() {
// Include the generator builder's key so the instance cache distinguishes generator settings too.
return HashKey.of(
super.hashKey(),
generatorBuilder.hashKey()
);
}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------
/**
 * <i><l>JsonSchemaSerializer</l> configuration property: </i> Add descriptions.
 *
 * <p>
 * Identifies which categories of types that descriptions should be automatically added to generated schemas.
 * <p>
 * The description is the result of calling {@link ClassMeta#getFullName()}.
 *
 * <h5 class='section'>See Also:</h5><ul>
 * <li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#addDescriptionsTo(TypeCategory...)}
 * </ul>
 *
 * @param values
 * The values to add to this setting.
 * <br>By default, descriptions are not added to any categories.
 * @return This object.
 */
@FluentSetter
public Builder addDescriptionsTo(TypeCategory...values) {
// Delegates to the underlying schema generator builder.
generatorBuilder.addDescriptionsTo(values);
return this;
}
/**
 * <i><l>JsonSchemaSerializer</l> configuration property: </i> Add examples.
 *
 * <p>
 * Identifies which categories of types that examples should be automatically added to generated schemas.
 * <p>
 * The examples come from calling {@link ClassMeta#getExample(BeanSession,JsonParserSession)} which in turn gets examples
 * from the following:
 * <ul class='javatree'>
 * <li class='ja'>{@link Example}
 * <li class='ja'>{@link Marshalled#example() Marshalled(example)}
 * </ul>
 *
 * <h5 class='section'>See Also:</h5><ul>
 * <li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#addExamplesTo(TypeCategory...)}
 * </ul>
 *
 * @param values
 * The values to add to this setting.
 * <br>By default, examples are not added to any categories.
 * @return This object.
 */
@FluentSetter
public Builder addExamplesTo(TypeCategory...values) {
// Delegates to the underlying schema generator builder.
generatorBuilder.addExamplesTo(values);
return this;
}
/**
 * <i><l>JsonSchemaSerializer</l> configuration property: </i> Allow nested descriptions.
 *
 * <p>
 * Identifies whether nested descriptions are allowed in schema definitions.
 *
 * <h5 class='section'>See Also:</h5><ul>
 * <li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#allowNestedDescriptions()}
 * </ul>
 *
 * @return This object.
 */
@FluentSetter
public Builder allowNestedDescriptions() {
// Delegates to the underlying schema generator builder.
generatorBuilder.allowNestedDescriptions();
return this;
}
/**
 * <i><l>JsonSchemaSerializer</l> configuration property: </i> Allow nested examples.
 *
 * <p>
 * Identifies whether nested examples are allowed in schema definitions.
 *
 * <h5 class='section'>See Also:</h5><ul>
 * <li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#allowNestedExamples()}
 * </ul>
 *
 * @return This object.
 */
@FluentSetter
public Builder allowNestedExamples() {
// Delegates to the underlying schema generator builder.
generatorBuilder.allowNestedExamples();
return this;
}
/**
 * <i><l>JsonSchemaSerializer</l> configuration property: </i> Schema definition mapper.
 *
 * <p>
 * Interface to use for converting Bean classes to definition IDs and URIs.
 * <p>
 * Used primarily for defining common definition sections for beans in Swagger JSON.
 * <p>
 * This setting is ignored if {@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#useBeanDefs()} is not enabled.
 *
 * <h5 class='section'>See Also:</h5><ul>
 * <li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#beanDefMapper(Class)}
 * </ul>
 *
 * @param value
 * The new value for this property.
 * <br>The default is {@link org.apache.juneau.jsonschema.BasicBeanDefMapper}.
 * @return This object.
 */
@FluentSetter
public Builder beanDefMapper(Class<? extends BeanDefMapper> value) {
// Delegates to the underlying schema generator builder.
generatorBuilder.beanDefMapper(value);
return this;
}
/**
 * <i><l>JsonSchemaSerializer</l> configuration property: </i> Use bean definitions.
 *
 * <p>
 * When enabled, schemas on beans will be serialized as the following:
 * <p class='bjson'>
 * {
 * type: <js>'object'</js>,
 * <js>'$ref'</js>: <js>'#/definitions/TypeId'</js>
 * }
 * </p>
 *
 * @return This object.
 */
@FluentSetter
public Builder useBeanDefs() {
// Delegates to the underlying schema generator builder.
generatorBuilder.useBeanDefs();
return this;
}
// <FluentSetters>
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder annotations(Annotation...values) {
super.annotations(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder apply(AnnotationWorkList work) {
super.apply(work);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Object...from) {
super.applyAnnotations(from);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Class<?>...from) {
super.applyAnnotations(from);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder cache(Cache<HashKey,? extends org.apache.juneau.Context> value) {
super.cache(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug() {
super.debug();
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug(boolean value) {
super.debug(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder impl(Context value) {
super.impl(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder type(Class<? extends org.apache.juneau.Context> value) {
super.type(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanClassVisibility(Visibility value) {
super.beanClassVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanConstructorVisibility(Visibility value) {
super.beanConstructorVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext value) {
super.beanContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext.Builder value) {
super.beanContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanDictionary(java.lang.Class<?>...values) {
super.beanDictionary(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanFieldVisibility(Visibility value) {
super.beanFieldVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanInterceptor(Class<?> on, Class<? extends org.apache.juneau.swap.BeanInterceptor<?>> value) {
super.beanInterceptor(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMapPutReturnsOldValue() {
super.beanMapPutReturnsOldValue();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMethodVisibility(Visibility value) {
super.beanMethodVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Map<String,Object> values) {
super.beanProperties(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Class<?> beanClass, String properties) {
super.beanProperties(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(String beanClassName, String properties) {
super.beanProperties(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Map<String,Object> values) {
super.beanPropertiesExcludes(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Class<?> beanClass, String properties) {
super.beanPropertiesExcludes(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(String beanClassName, String properties) {
super.beanPropertiesExcludes(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Map<String,Object> values) {
super.beanPropertiesReadOnly(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Class<?> beanClass, String properties) {
super.beanPropertiesReadOnly(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(String beanClassName, String properties) {
super.beanPropertiesReadOnly(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Map<String,Object> values) {
super.beanPropertiesWriteOnly(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Class<?> beanClass, String properties) {
super.beanPropertiesWriteOnly(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(String beanClassName, String properties) {
super.beanPropertiesWriteOnly(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireDefaultConstructor() {
super.beansRequireDefaultConstructor();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSerializable() {
super.beansRequireSerializable();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSettersForGetters() {
super.beansRequireSettersForGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder dictionaryOn(Class<?> on, java.lang.Class<?>...values) {
super.dictionaryOn(on, values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableBeansRequireSomeProperties() {
super.disableBeansRequireSomeProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreMissingSetters() {
super.disableIgnoreMissingSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreTransientFields() {
super.disableIgnoreTransientFields();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreUnknownNullBeanProperties() {
super.disableIgnoreUnknownNullBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableInterfaceProxies() {
super.disableInterfaceProxies();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, T o) {
super.example(pojoClass, o);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, String json) {
super.example(pojoClass, json);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters() {
super.findFluentSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters(Class<?> on) {
super.findFluentSetters(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnGetters() {
super.ignoreInvocationExceptionsOnGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnSetters() {
super.ignoreInvocationExceptionsOnSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownBeanProperties() {
super.ignoreUnknownBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownEnumValues() {
super.ignoreUnknownEnumValues();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClass(Class<?> interfaceClass, Class<?> implClass) {
super.implClass(interfaceClass, implClass);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClasses(Map<Class<?>,Class<?>> values) {
super.implClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaceClass(Class<?> on, Class<?> value) {
super.interfaceClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaces(java.lang.Class<?>...value) {
super.interfaces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder locale(Locale value) {
super.locale(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder mediaType(MediaType value) {
super.mediaType(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanClasses(java.lang.Class<?>...values) {
super.notBeanClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanPackages(String...values) {
super.notBeanPackages(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<?> on, Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties() {
super.sortProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties(java.lang.Class<?>...on) {
super.sortProperties(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder stopClass(Class<?> on, Class<?> value) {
super.stopClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction) {
super.swap(normalClass, swappedClass, swapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction, ThrowingFunction<S,T> unswapFunction) {
super.swap(normalClass, swappedClass, swapFunction, unswapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Object...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Class<?>...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder timeZone(TimeZone value) {
super.timeZone(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typeName(Class<?> on, String value) {
super.typeName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(String value) {
super.typePropertyName(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(Class<?> on, String value) {
super.typePropertyName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useEnumNames() {
super.useEnumNames();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useJavaBeanIntrospector() {
super.useJavaBeanIntrospector();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions() {
super.detectRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions(boolean value) {
super.detectRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions() {
super.ignoreRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions(boolean value) {
super.ignoreRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder initialDepth(int value) {
super.initialDepth(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder maxDepth(int value) {
super.maxDepth(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder accept(String value) {
super.accept(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addBeanTypes() {
super.addBeanTypes();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addBeanTypes(boolean value) {
super.addBeanTypes(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addRootType() {
super.addRootType();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addRootType(boolean value) {
super.addRootType(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder keepNullProperties() {
super.keepNullProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder keepNullProperties(boolean value) {
super.keepNullProperties(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder listener(Class<? extends org.apache.juneau.serializer.SerializerListener> value) {
super.listener(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder produces(String value) {
super.produces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortCollections() {
super.sortCollections();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortCollections(boolean value) {
super.sortCollections(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortMaps() {
super.sortMaps();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortMaps(boolean value) {
super.sortMaps(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyCollections() {
super.trimEmptyCollections();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyCollections(boolean value) {
super.trimEmptyCollections(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyMaps() {
super.trimEmptyMaps();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyMaps(boolean value) {
super.trimEmptyMaps(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimStrings() {
super.trimStrings();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimStrings(boolean value) {
super.trimStrings(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriContext(UriContext value) {
super.uriContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriRelativity(UriRelativity value) {
super.uriRelativity(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriResolution(UriResolution value) {
super.uriResolution(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder fileCharset(Charset value) {
super.fileCharset(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder maxIndent(int value) {
super.maxIndent(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder quoteChar(char value) {
super.quoteChar(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder quoteCharOverride(char value) {
super.quoteCharOverride(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder sq() {
super.sq();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder streamCharset(Charset value) {
super.streamCharset(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder useWhitespace() {
super.useWhitespace();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder useWhitespace(boolean value) {
super.useWhitespace(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder ws() {
super.ws();
return this;
}
@Override /* GENERATED - org.apache.juneau.json.JsonSerializer.Builder */
public Builder addBeanTypesJson() {
super.addBeanTypesJson();
return this;
}
@Override /* GENERATED - org.apache.juneau.json.JsonSerializer.Builder */
public Builder addBeanTypesJson(boolean value) {
super.addBeanTypesJson(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.json.JsonSerializer.Builder */
public Builder escapeSolidus() {
super.escapeSolidus();
return this;
}
@Override /* GENERATED - org.apache.juneau.json.JsonSerializer.Builder */
public Builder escapeSolidus(boolean value) {
super.escapeSolidus(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.json.JsonSerializer.Builder */
public Builder simpleAttrs() {
super.simpleAttrs();
return this;
}
@Override /* GENERATED - org.apache.juneau.json.JsonSerializer.Builder */
public Builder simpleAttrs(boolean value) {
super.simpleAttrs(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.json.JsonSerializer.Builder */
public Builder json5() {
super.json5();
return this;
}
// </FluentSetters>
}
//-------------------------------------------------------------------------------------------------------------------
// Instance
//-------------------------------------------------------------------------------------------------------------------
final JsonSchemaGenerator generator;
private final Map<ClassMeta<?>,JsonSchemaClassMeta> jsonSchemaClassMetas = new ConcurrentHashMap<>();
private final Map<BeanPropertyMeta,JsonSchemaBeanPropertyMeta> jsonSchemaBeanPropertyMetas = new ConcurrentHashMap<>();
/**
 * Constructor.
 *
 * <p>
 * Note that recursion detection and recursion ignoring are always force-enabled on the builder,
 * regardless of the caller's settings — presumably so that self-referential bean graphs don't
 * cause infinite loops during schema generation (TODO confirm intent).
 *
 * @param builder The builder for this object.
 */
public JsonSchemaSerializer(Builder builder) {
super(builder.detectRecursions().ignoreRecursions());
generator = builder.generatorBuilder.build();
}
@Override /* Context */
public Builder copy() {
// Creates a new builder initialized from this serializer's settings.
return new Builder(this);
}
@Override /* Context */
public JsonSchemaSerializerSession.Builder createSession() {
// Returns a builder for a single-use serialization session backed by this (reusable) serializer.
return JsonSchemaSerializerSession.create(this);
}
@Override /* Context */
public JsonSchemaSerializerSession getSession() {
// Convenience for creating a session with all-default session settings.
return createSession().build();
}
// Returns the schema generator configured for this serializer (package-private; used by sessions).
JsonSchemaGenerator getGenerator() {
return generator;
}
//-----------------------------------------------------------------------------------------------------------------
// Extended metadata
//-----------------------------------------------------------------------------------------------------------------
@Override /* JsonSchemaMetaProvider */
public JsonSchemaClassMeta getJsonSchemaClassMeta(ClassMeta<?> cm) {
// Lazily-populated cache.  The previous check-then-put was not atomic, so two threads racing on
// the same key could each construct a meta and observe different instances.  putIfAbsent ensures
// exactly one canonical instance per key.  computeIfAbsent is deliberately avoided in case meta
// construction re-enters this map (recursive updates are forbidden on ConcurrentHashMap).
JsonSchemaClassMeta m = jsonSchemaClassMetas.get(cm);
if (m == null) {
m = new JsonSchemaClassMeta(cm, this);
JsonSchemaClassMeta m2 = jsonSchemaClassMetas.putIfAbsent(cm, m);
if (m2 != null)
m = m2;
}
return m;
}
@Override /* JsonSchemaMetaProvider */
public JsonSchemaBeanPropertyMeta getJsonSchemaBeanPropertyMeta(BeanPropertyMeta bpm) {
// Lazily-populated cache keyed on the (possibly delegating) property meta, while the constructed
// meta wraps the delegate.  As above, putIfAbsent closes the check-then-put race so all callers
// see a single canonical instance per key.
JsonSchemaBeanPropertyMeta m = jsonSchemaBeanPropertyMetas.get(bpm);
if (m == null) {
m = new JsonSchemaBeanPropertyMeta(bpm.getDelegateFor(), this);
JsonSchemaBeanPropertyMeta m2 = jsonSchemaBeanPropertyMetas.putIfAbsent(bpm, m);
if (m2 != null)
m = m2;
}
return m;
}
//-----------------------------------------------------------------------------------------------------------------
// Other methods
//-----------------------------------------------------------------------------------------------------------------
@Override /* Context */
protected JsonMap properties() {
// Exposes this context's configuration (for debugging/introspection).
return filteredMap("generator", generator);
}
} |
openjdk/skara | 36,087 | bots/pr/src/test/java/org/openjdk/skara/bots/pr/CSRBotTests.java | /*
* Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.skara.bots.pr;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.openjdk.skara.issuetracker.Issue;
import org.openjdk.skara.issuetracker.Link;
import org.openjdk.skara.json.JSON;
import org.openjdk.skara.test.CheckableRepository;
import org.openjdk.skara.test.HostCredentials;
import org.openjdk.skara.test.TemporaryDirectory;
import org.openjdk.skara.test.TestBotRunner;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.*;
class CSRBotTests {
/**
 * Verifies that when a CSR issue linked to a PR's main issue is closed as "Approved",
 * the CSR issue bot removes the "csr" label from the PR and checks off the CSR item
 * in the PR body's progress list.
 */
@Test
void removeLabelForApprovedCSR(TestInfo testInfo) throws IOException {
try (var credentials = new HostCredentials(testInfo);
var tempFolder = new TemporaryDirectory()) {
var author = credentials.getHostedRepository();
var reviewer = credentials.getHostedRepository();
var bot = credentials.getHostedRepository();
var issueProject = credentials.getIssueProject();
var issue = issueProject.createIssue("This is an issue", List.of(), Map.of());
issue.setProperty("issuetype", JSON.of("Bug"));
var censusBuilder = credentials.getCensusBuilder()
.addReviewer(reviewer.forge().currentUser().id())
.addCommitter(author.forge().currentUser().id());
// Shared issue-to-PR map links the PR bot and the CSR issue bot.
Map<String, List<PRRecord>> issuePRMap = new HashMap<>();
var prBot = PullRequestBot.newBuilder()
.repo(bot)
.issueProject(issueProject)
.censusRepo(censusBuilder.build())
.enableCsr(true)
.issuePRMap(issuePRMap)
.build();
var csrIssueBot = new CSRIssueBot(issueProject, List.of(author), Map.of(bot.name(), prBot), issuePRMap);
// Run issue bot once to initialize lastUpdatedAt
TestBotRunner.runPeriodicItems(csrIssueBot);
// Create a CSR issue and link it to the main issue with a "csr for" link.
var csr = issueProject.createIssue("This is a CSR", List.of(), Map.of());
csr.setState(Issue.State.OPEN);
csr.setProperty("issuetype", JSON.of("CSR"));
issue.addLink(Link.create(csr, "csr for").build());
// Populate the projects repository
var localRepoFolder = tempFolder.path().resolve("localrepo");
var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
var masterHash = localRepo.resolve("master").orElseThrow();
assertFalse(CheckableRepository.hasBeenEdited(localRepo));
localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
// Make a change with a corresponding PR
var editHash = CheckableRepository.appendAndCommit(localRepo);
localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
var pr = credentials.createPullRequest(author, "master", "edit", issue.id() + ": This is an issue");
TestBotRunner.runPeriodicItems(prBot);
// Use CSRIssueBot to add CSR label
TestBotRunner.runPeriodicItems(csrIssueBot);
assertTrue(pr.store().labelNames().contains("csr"));
// Approve CSR issue
csr.setState(Issue.State.CLOSED);
csr.setProperty("resolution", JSON.object().put("name", "Approved"));
// Run bot
TestBotRunner.runPeriodicItems(csrIssueBot);
// The bot should have removed the CSR label
assertFalse(pr.store().labelNames().contains("csr"));
assertTrue(pr.store().body().contains("- [x] Change requires CSR request"));
}
}
/**
 * Verifies that a "csr" label added manually via the {@code /csr} command is kept
 * when the PR title does not reference any JBS issue (so there is no CSR issue the
 * bot could consult for approval).
 */
@Test
void keepLabelForNoIssue(TestInfo testInfo) throws IOException {
try (var credentials = new HostCredentials(testInfo);
var tempFolder = new TemporaryDirectory()) {
var author = credentials.getHostedRepository();
var reviewer = credentials.getHostedRepository();
var bot = credentials.getHostedRepository();
var issues = credentials.getIssueProject();
var censusBuilder = credentials.getCensusBuilder()
.addReviewer(reviewer.forge().currentUser().id())
.addCommitter(author.forge().currentUser().id());
var prBot = PullRequestBot.newBuilder().repo(bot).issueProject(issues).censusRepo(censusBuilder.build()).enableCsr(true).build();
// Populate the projects repository
var localRepoFolder = tempFolder.path().resolve("localrepo");
var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
var masterHash = localRepo.resolve("master").orElseThrow();
assertFalse(CheckableRepository.hasBeenEdited(localRepo));
localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
// Make a change with a corresponding PR
var editHash = CheckableRepository.appendAndCommit(localRepo);
localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
// Note: PR title intentionally has no issue-id prefix.
var pr = credentials.createPullRequest(author, "master", "edit", "This is an issue");
// Use csr command to add csr label
var reviewPr = reviewer.pullRequest(pr.id());
reviewPr.addComment("/csr");
TestBotRunner.runPeriodicItems(prBot);
assertTrue(pr.store().labelNames().contains("csr"));
// Run bot
TestBotRunner.runPeriodicItems(prBot);
// The bot should have kept the CSR label
assertTrue(pr.store().labelNames().contains("csr"));
}
}
@Test
void keepLabelForNoJBS(TestInfo testInfo) throws IOException {
    // A PR title referencing a numeric id ("123: ...") that presumably does not
    // resolve to a real JBS issue: a manually requested `csr` label must not be
    // removed by later bot runs.
    try (var credentials = new HostCredentials(testInfo);
         var tempFolder = new TemporaryDirectory()) {
        var authorRepo = credentials.getHostedRepository();
        var reviewerRepo = credentials.getHostedRepository();
        var botRepo = credentials.getHostedRepository();
        var issueProject = credentials.getIssueProject();
        var census = credentials.getCensusBuilder()
                .addReviewer(reviewerRepo.forge().currentUser().id())
                .addCommitter(authorRepo.forge().currentUser().id())
                .build();
        var pullRequestBot = PullRequestBot.newBuilder()
                .repo(botRepo)
                .issueProject(issueProject)
                .censusRepo(census)
                .enableCsr(true)
                .build();

        // Populate the projects repository
        var repo = CheckableRepository.init(tempFolder.path().resolve("localrepo"), authorRepo.repositoryType());
        var baseHash = repo.resolve("master").orElseThrow();
        assertFalse(CheckableRepository.hasBeenEdited(repo));
        repo.push(baseHash, authorRepo.authenticatedUrl(), "master", true);

        // Make a change with a corresponding PR
        var changeHash = CheckableRepository.appendAndCommit(repo);
        repo.push(changeHash, authorRepo.authenticatedUrl(), "edit", true);
        var pullRequest = credentials.createPullRequest(authorRepo, "master", "edit", "123: This is an issue");

        // Use the csr command (issued by the reviewer) to add the csr label
        reviewerRepo.pullRequest(pullRequest.id()).addComment("/csr");
        TestBotRunner.runPeriodicItems(pullRequestBot);
        assertTrue(pullRequest.store().labelNames().contains("csr"));

        // A further bot run must keep the CSR label in place
        TestBotRunner.runPeriodicItems(pullRequestBot);
        assertTrue(pullRequest.store().labelNames().contains("csr"));
    }
}
@Test
void keepLabelForNotApprovedCSR(TestInfo testInfo) throws IOException {
    // A linked CSR whose resolution is "Unresolved" (i.e. not yet approved) must
    // cause the bot to add the `csr` label and keep it across repeated runs.
    try (var credentials = new HostCredentials(testInfo);
         var tempFolder = new TemporaryDirectory()) {
        var author = credentials.getHostedRepository();
        var reviewer = credentials.getHostedRepository();
        var bot = credentials.getHostedRepository();
        var issues = credentials.getIssueProject();
        var issue = issues.createIssue("This is an issue", List.of(), Map.of());
        // Despite the summary text, the resolution below is "Unresolved": not approved.
        var csr = issues.createIssue("This is an approved CSR", List.of(), Map.of("resolution",
                JSON.object().put("name", "Unresolved")));
        csr.setState(Issue.State.OPEN);
        issue.addLink(Link.create(csr, "csr for").build());
        var censusBuilder = credentials.getCensusBuilder()
                .addReviewer(reviewer.forge().currentUser().id())
                .addCommitter(author.forge().currentUser().id());
        Map<String, List<PRRecord>> issuePRMap = new HashMap<>();
        var prBot = PullRequestBot.newBuilder()
                .repo(bot)
                .issueProject(issues)
                .censusRepo(censusBuilder.build())
                .enableCsr(true)
                .issuePRMap(issuePRMap)
                .build();
        // Populate the projects repository
        var localRepoFolder = tempFolder.path().resolve("localrepo");
        var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
        var masterHash = localRepo.resolve("master").orElseThrow();
        assertFalse(CheckableRepository.hasBeenEdited(localRepo));
        localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
        // Make a change with a corresponding PR
        var editHash = CheckableRepository.appendAndCommit(localRepo);
        localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
        var pr = credentials.createPullRequest(author, "master", "edit", issue.id() + ": This is an issue");
        // Run bot
        TestBotRunner.runPeriodicItems(prBot);
        // The bot added the csr label automatically
        assertTrue(pr.store().labelNames().contains("csr"));
        // Run bot
        TestBotRunner.runPeriodicItems(prBot);
        // The bot should have kept the CSR label
        assertTrue(pr.store().labelNames().contains("csr"));
    }
}
@Test
void handleCSRWithNullResolution(TestInfo testInfo) throws IOException {
    // Regression test: a CSR issue whose "resolution" property is JSON null must
    // not crash the bot with an NPE; it should be treated as not approved, so the
    // `csr` label stays on the PR.
    try (var credentials = new HostCredentials(testInfo);
         var tempFolder = new TemporaryDirectory()) {
        var author = credentials.getHostedRepository();
        var reviewer = credentials.getHostedRepository();
        var bot = credentials.getHostedRepository();
        var issues = credentials.getIssueProject();
        var issue = issues.createIssue("This is an issue", List.of(), Map.of());
        // JSON.of() with no argument produces a JSON null resolution value.
        var csr = issues.createIssue("This is an CSR with null resolution", List.of(), Map.of("resolution", JSON.of()));
        csr.setState(Issue.State.OPEN);
        issue.addLink(Link.create(csr, "csr for").build());
        var censusBuilder = credentials.getCensusBuilder()
                .addReviewer(reviewer.forge().currentUser().id())
                .addCommitter(author.forge().currentUser().id());
        Map<String, List<PRRecord>> issuePRMap = new HashMap<>();
        var prBot = PullRequestBot.newBuilder()
                .repo(bot)
                .issueProject(issues)
                .censusRepo(censusBuilder.build())
                .enableCsr(true)
                .issuePRMap(issuePRMap)
                .build();
        // Populate the projects repository
        var localRepoFolder = tempFolder.path().resolve("localrepo");
        var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
        var masterHash = localRepo.resolve("master").orElseThrow();
        assertFalse(CheckableRepository.hasBeenEdited(localRepo));
        localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
        // Make a change with a corresponding PR
        var editHash = CheckableRepository.appendAndCommit(localRepo);
        localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
        var pr = credentials.createPullRequest(author, "master", "edit", issue.id() + ": This is an issue");
        // Run bot
        TestBotRunner.runPeriodicItems(prBot);
        // The bot added the csr label automatically
        assertTrue(pr.store().labelNames().contains("csr"));
        // Run bot, should *not* throw NPE
        TestBotRunner.runPeriodicItems(prBot);
        // The bot should have kept the CSR label
        assertTrue(pr.store().labelNames().contains("csr"));
    }
}
@Test
void handleCSRWithNullName(TestInfo testInfo) throws IOException {
    // Regression test: a CSR whose resolution object exists but whose "name"
    // field is JSON null must not cause an NPE; the bot should keep the `csr` label.
    try (var credentials = new HostCredentials(testInfo);
         var tempFolder = new TemporaryDirectory()) {
        var author = credentials.getHostedRepository();
        var reviewer = credentials.getHostedRepository();
        var bot = credentials.getHostedRepository();
        var issues = credentials.getIssueProject();
        var issue = issues.createIssue("This is an issue", List.of(), Map.of());
        // The resolution is an object with a JSON-null "name" (not a null resolution).
        var csr = issues.createIssue("This is a CSR with null resolution", List.of(),
                Map.of("resolution", JSON.object().put("name", JSON.of())));
        csr.setState(Issue.State.OPEN);
        issue.addLink(Link.create(csr, "csr for").build());
        var censusBuilder = credentials.getCensusBuilder()
                .addReviewer(reviewer.forge().currentUser().id())
                .addCommitter(author.forge().currentUser().id());
        Map<String, List<PRRecord>> issuePRMap = new HashMap<>();
        var prBot = PullRequestBot.newBuilder()
                .repo(bot)
                .issueProject(issues)
                .censusRepo(censusBuilder.build())
                .enableCsr(true)
                .issuePRMap(issuePRMap)
                .build();
        // Populate the projects repository
        var localRepoFolder = tempFolder.path().resolve("localrepo");
        var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
        var masterHash = localRepo.resolve("master").orElseThrow();
        assertFalse(CheckableRepository.hasBeenEdited(localRepo));
        localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
        // Make a change with a corresponding PR
        var editHash = CheckableRepository.appendAndCommit(localRepo);
        localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
        var pr = credentials.createPullRequest(author, "master", "edit", issue.id() + ": This is an issue");
        // Run bot
        TestBotRunner.runPeriodicItems(prBot);
        // The bot added the csr label automatically
        assertTrue(pr.store().labelNames().contains("csr"));
        // Run bot, should *not* throw NPE
        TestBotRunner.runPeriodicItems(prBot);
        // The bot should have kept the CSR label
        assertTrue(pr.store().labelNames().contains("csr"));
    }
}
@Test
void testBackportCsr(TestInfo testInfo) throws IOException {
    // End-to-end scenario for CSR handling on backport PRs. The `.jcheck/conf`
    // `version` property of the target branch drives which fix-version of a CSR
    // (primary or backport) applies to the PR. The test walks through: missing
    // version, invalid version, version matching/not matching the primary CSR,
    // and finally a dedicated backport issue + backport CSR.
    try (var credentials = new HostCredentials(testInfo);
         var tempFolder = new TemporaryDirectory()) {
        var author = credentials.getHostedRepository();
        var reviewer = credentials.getHostedRepository();
        var bot = credentials.getHostedRepository();
        var issueProject = credentials.getIssueProject();
        var censusBuilder = credentials.getCensusBuilder()
                .addReviewer(reviewer.forge().currentUser().id())
                .addCommitter(author.forge().currentUser().id());
        Map<String, List<PRRecord>> issuePRMap = new HashMap<>();
        var prBot = PullRequestBot.newBuilder()
                .repo(bot)
                .issueProject(issueProject)
                .censusRepo(censusBuilder.build())
                .enableCsr(true)
                .issuePRMap(issuePRMap)
                .build();
        var csrIssueBot = new CSRIssueBot(issueProject, List.of(author), Map.of(bot.name(), prBot), issuePRMap);
        // Run issue bot once to initialize lastUpdatedAt
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // Primary issue and primary CSR are both resolved with fix version 18.
        var issue = issueProject.createIssue("This is the primary issue", List.of(), Map.of());
        issue.setState(Issue.State.CLOSED);
        issue.setProperty("issuetype", JSON.of("Bug"));
        issue.setProperty("fixVersions", JSON.array().add("18"));
        var csr = issueProject.createIssue("This is the primary CSR", List.of(), Map.of());
        csr.setState(Issue.State.CLOSED);
        csr.setProperty("issuetype", JSON.of("CSR"));
        csr.setProperty("fixVersions", JSON.array().add("18"));
        issue.addLink(Link.create(csr, "csr for").build());
        // Populate the projects repository
        var localRepoFolder = tempFolder.path().resolve("localrepo");
        var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
        var masterHash = localRepo.resolve("master").orElseThrow();
        localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
        // Push a commit to the jdk18 branch
        var jdk18Branch = localRepo.branch(masterHash, "jdk18");
        localRepo.checkout(jdk18Branch);
        var newFile = localRepo.root().resolve("a_new_file.txt");
        Files.writeString(newFile, "a_new_file");
        localRepo.add(newFile);
        var issueNumber = issue.id().split("-")[1];
        var commitMessage = issueNumber + ": This is the primary issue\n\nReviewed-by: integrationreviewer2";
        var commitHash = localRepo.commit(commitMessage, "integrationcommitter1", "integrationcommitter1@openjdk.org");
        localRepo.push(commitHash, author.authenticatedUrl(), "jdk18", true);
        // "backport" the commit to the master branch
        localRepo.checkout(localRepo.defaultBranch());
        var editBranch = localRepo.branch(masterHash, "edit");
        localRepo.checkout(editBranch);
        var newFile2 = localRepo.root().resolve("a_new_file.txt");
        Files.writeString(newFile2, "a_new_file");
        localRepo.add(newFile2);
        var editHash = localRepo.commit("Backport", "duke", "duke@openjdk.org");
        localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
        // "Backport <hash>" titles mark the PR as a backport of that commit.
        var pr = credentials.createPullRequest(author, "master", "edit", "Backport " + commitHash.hex());
        // run bot to add backport label
        TestBotRunner.runPeriodicItems(prBot);
        assertTrue(pr.store().labelNames().contains("backport"));
        // Remove `version=0.1` from `.jcheck/conf`, set the version as null in the edit branch
        var defaultConf = Files.readString(localRepo.root().resolve(".jcheck/conf"));
        var newConf = defaultConf.replace("version=0.1", "");
        Files.writeString(localRepo.root().resolve(".jcheck/conf"), newConf);
        localRepo.add(localRepo.root().resolve(".jcheck/conf"));
        var confHash = localRepo.commit("Set version as null", "duke", "duke@openjdk.org");
        localRepo.push(confHash, author.authenticatedUrl(), "edit", true);
        assertFalse(pr.store().labelNames().contains("csr"));
        // Run bot. The bot won't get a CSR.
        TestBotRunner.runPeriodicItems(prBot);
        // The bot shouldn't add the `csr` label.
        assertFalse(pr.store().labelNames().contains("csr"));
        // Add `version=bla` to `.jcheck/conf`, set the version as a wrong value
        defaultConf = Files.readString(localRepo.root().resolve(".jcheck/conf"));
        newConf = defaultConf.replace("project=test", "project=test\nversion=bla");
        Files.writeString(localRepo.root().resolve(".jcheck/conf"), newConf);
        localRepo.add(localRepo.root().resolve(".jcheck/conf"));
        confHash = localRepo.commit("Set the version as a wrong value", "duke", "duke@openjdk.org");
        localRepo.push(confHash, author.authenticatedUrl(), "edit", true);
        // Run bot. The bot won't get a CSR.
        TestBotRunner.runPeriodicItems(prBot);
        // The bot shouldn't add the `csr` label.
        assertFalse(pr.store().labelNames().contains("csr"));
        // Test the method `TestPullRequest#diff`.
        assertEquals(1, pr.diff().patches().size());
        // Set the `version` in `.jcheck/conf` as 17 which is an available version.
        defaultConf = Files.readString(localRepo.root().resolve(".jcheck/conf"));
        newConf = defaultConf.replace("version=bla", "version=17");
        Files.writeString(localRepo.root().resolve(".jcheck/conf"), newConf);
        localRepo.add(localRepo.root().resolve(".jcheck/conf"));
        confHash = localRepo.commit("Set the version as 17", "duke", "duke@openjdk.org");
        localRepo.push(confHash, author.authenticatedUrl(), "edit", true);
        // Run bot. The primary CSR doesn't have the fix version `17`, so the bot won't get a CSR.
        TestBotRunner.runPeriodicItems(prBot);
        // The bot shouldn't add the `csr` label.
        assertFalse(pr.store().labelNames().contains("csr"));
        // Set the fix versions of the primary CSR to 17 and 18.
        csr.setProperty("fixVersions", JSON.array().add("17").add("18"));
        // Run csr issue bot to trigger on updates to the CSR issue. The primary CSR has
        // the fix version `17`, so it would be used and the `csr` label would be added.
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // The bot should have added the `csr` label
        assertTrue(pr.store().labelNames().contains("csr"));
        // Revert the fix versions of the primary CSR to 18.
        csr.setProperty("fixVersions", JSON.array().add("18"));
        // Create a backport issue whose fix version is 17
        var backportIssue = issueProject.createIssue("This is the backport issue", List.of(), Map.of());
        backportIssue.setProperty("issuetype", JSON.of("Backport"));
        backportIssue.setProperty("fixVersions", JSON.array().add("17"));
        backportIssue.setState(Issue.State.OPEN);
        issue.addLink(Link.create(backportIssue, "backported by").build());
        assertTrue(pr.store().labelNames().contains("csr"));
        // remove the csr label with /csr command
        var reviewerPr = reviewer.pullRequest(pr.id());
        reviewerPr.addComment("/csr unneeded");
        // Run csrIssueBot to update pr body
        TestBotRunner.runPeriodicItems(csrIssueBot);
        TestBotRunner.runPeriodicItems(prBot);
        assertFalse(pr.store().labelNames().contains("csr"));
        // Run bot. The bot can find a backport issue but can't find a backport CSR.
        TestBotRunner.runPeriodicItems(prBot);
        // The bot shouldn't add the `csr` label.
        assertFalse(pr.store().labelNames().contains("csr"));
        // Create a backport CSR whose fix version is 17.
        var backportCsr = issueProject.createIssue("This is the backport CSR", List.of(), Map.of());
        backportCsr.setProperty("issuetype", JSON.of("CSR"));
        backportCsr.setProperty("fixVersions", JSON.array().add("17"));
        backportCsr.setState(Issue.State.OPEN);
        backportIssue.addLink(Link.create(backportCsr, "csr for").build());
        // Run csr issue bot. The bot can find a backport issue and a backport CSR.
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // The bot should have added the CSR label
        assertTrue(pr.store().labelNames().contains("csr"));
        // Now we have a primary issue, a primary CSR, a backport issue, a backport CSR.
        // Set the backport CSR to have multiple fix versions, included 11.
        backportCsr.setProperty("fixVersions", JSON.array().add("17").add("11").add("8"));
        // Set the `version` in `.jcheck/conf` as 11.
        defaultConf = Files.readString(localRepo.root().resolve(".jcheck/conf"));
        newConf = defaultConf.replace("version=17", "version=11");
        Files.writeString(localRepo.root().resolve(".jcheck/conf"), newConf);
        localRepo.add(localRepo.root().resolve(".jcheck/conf"));
        confHash = localRepo.commit("Set the version as 11", "duke", "duke@openjdk.org");
        localRepo.push(confHash, author.authenticatedUrl(), "edit", true);
        pr.removeLabel("csr");
        // Run bot.
        TestBotRunner.runPeriodicItems(prBot);
        // The bot should have added the CSR label
        assertTrue(pr.store().labelNames().contains("csr"));
        // Set the backport CSR to have multiple fix versions, excluded 11.
        backportCsr.setProperty("fixVersions", JSON.array().add("17").add("8"));
        reviewerPr.addComment("/csr unneeded");
        // Run csrIssueBot to update the pr body
        TestBotRunner.runPeriodicItems(csrIssueBot);
        TestBotRunner.runPeriodicItems(prBot);
        assertFalse(pr.store().labelNames().contains("csr"));
        // Run bot.
        TestBotRunner.runPeriodicItems(prBot);
        // The bot shouldn't add the `csr` label.
        assertFalse(pr.store().labelNames().contains("csr"));
    }
}
@Test
void testPRWithMultipleIssues(TestInfo testInfo) throws IOException {
    // A PR that solves several issues: the `csr` label must reflect the union of
    // all linked CSRs — present while ANY linked CSR is unapproved/withdrawn-pending,
    // absent once every linked CSR is withdrawn or approved.
    try (var credentials = new HostCredentials(testInfo);
         var tempFolder = new TemporaryDirectory()) {
        var author = credentials.getHostedRepository();
        var reviewer = credentials.getHostedRepository();
        var bot = credentials.getHostedRepository();
        var issueProject = credentials.getIssueProject();
        var issue = issueProject.createIssue("This is an issue", List.of(), Map.of());
        issue.setProperty("issuetype", JSON.of("Bug"));
        var censusBuilder = credentials.getCensusBuilder()
                .addReviewer(reviewer.forge().currentUser().id())
                .addCommitter(author.forge().currentUser().id());
        Map<String, List<PRRecord>> issuePRMap = new HashMap<>();
        var prBot = PullRequestBot.newBuilder()
                .repo(bot).issueProject(issueProject)
                .censusRepo(censusBuilder.build())
                .enableCsr(true)
                .issuePRMap(issuePRMap)
                .build();
        var csrIssueBot = new CSRIssueBot(issueProject, List.of(author), Map.of(bot.name(), prBot), issuePRMap);
        // Run issue bot once to initialize lastUpdatedAt
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // Populate the projects repository
        var localRepoFolder = tempFolder.path().resolve("localrepo");
        var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
        var masterHash = localRepo.resolve("master").orElseThrow();
        assertFalse(CheckableRepository.hasBeenEdited(localRepo));
        localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
        // Make a change with a corresponding PR
        var editHash = CheckableRepository.appendAndCommit(localRepo);
        localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
        var pr = credentials.createPullRequest(author, "master", "edit", issue.id() + ": This is an issue");
        // Run bot
        TestBotRunner.runPeriodicItems(prBot);
        // Add another issue to this pr
        var issue2 = issueProject.createIssue("This is an issue 2", List.of(), Map.of());
        issue2.setProperty("issuetype", JSON.of("Bug"));
        // Add issue2 to this pr
        pr.addComment("/issue " + issue2.id());
        TestBotRunner.runPeriodicItems(prBot);
        assertTrue(pr.store().comments().getLast().body().contains("solves: '2'"));
        // Add a csr to issue2
        var csr2 = issueProject.createIssue("This is an CSR for issue2", List.of(), Map.of());
        csr2.setProperty("issuetype", JSON.of("CSR"));
        csr2.setState(Issue.State.OPEN);
        issue2.addLink(Link.create(csr2, "csr for").build());
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // PR should contain csr label
        assertTrue(pr.store().labelNames().contains("csr"));
        assertTrue(pr.store().body().contains("This is an CSR for issue2"));
        // Add another issue to this pr
        var issue3 = issueProject.createIssue("This is an issue 3", List.of(), Map.of());
        issue3.setProperty("issuetype", JSON.of("Bug"));
        // Add issue3 to this pr
        pr.addComment("/issue " + issue3.id());
        TestBotRunner.runPeriodicItems(prBot);
        assertTrue(pr.store().comments().getLast().body().contains("solves: '4'"));
        // Withdrawn the csr for issue2
        csr2.setState(Issue.State.CLOSED);
        csr2.setProperty("resolution", JSON.object().put("name", "Withdrawn"));
        TestBotRunner.runPeriodicItems(csrIssueBot);
        assertTrue(pr.store().body().contains("This is an CSR for issue2 (**CSR**) (Withdrawn)"));
        // PR should not contain csr label
        assertFalse(pr.store().labelNames().contains("csr"));
        // Add a csr to issue3
        var csr3 = issueProject.createIssue("This is an CSR for issue3", List.of(), Map.of());
        csr3.setProperty("issuetype", JSON.of("CSR"));
        csr3.setState(Issue.State.OPEN);
        issue3.addLink(Link.create(csr3, "csr for").build());
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // PR should contain csr label
        assertTrue(pr.store().labelNames().contains("csr"));
        // Approve CSR3
        csr3.setState(Issue.State.CLOSED);
        csr3.setProperty("resolution", JSON.object().put("name", "Approved"));
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // PR should not contain csr label
        assertFalse(pr.store().labelNames().contains("csr"));
        // Approve CSR2
        csr2.setProperty("resolution", JSON.object().put("name", "Approved"));
        TestBotRunner.runPeriodicItems(csrIssueBot);
        // PR should not contain csr label
        assertFalse(pr.store().labelNames().contains("csr"));
    }
}
@Test
void testFindCSRWithVersionInMergedBranch(TestInfo testInfo) throws IOException {
    // The fix version used to match a CSR must be taken from `.jcheck/conf` on the
    // TARGET branch (master), not from the PR's source branch. Here the source
    // branch still says version=0.1 while master is updated to 17, matching the CSR.
    try (var credentials = new HostCredentials(testInfo);
         var tempFolder = new TemporaryDirectory()) {
        var author = credentials.getHostedRepository();
        var reviewer = credentials.getHostedRepository();
        var bot = credentials.getHostedRepository();
        var issueProject = credentials.getIssueProject();
        var issue = issueProject.createIssue("This is an issue", List.of(), Map.of());
        issue.setProperty("issuetype", JSON.of("Bug"));
        var censusBuilder = credentials.getCensusBuilder()
                .addReviewer(reviewer.forge().currentUser().id())
                .addCommitter(author.forge().currentUser().id());
        Map<String, List<PRRecord>> issuePRMap = new HashMap<>();
        var prBot = PullRequestBot.newBuilder()
                .repo(bot)
                .issueProject(issueProject)
                .censusRepo(censusBuilder.build())
                .enableCsr(true)
                .issuePRMap(issuePRMap)
                .build();
        var csrIssueBot = new CSRIssueBot(issueProject, List.of(author), Map.of(bot.name(), prBot), issuePRMap);
        // Run issue bot once to initialize lastUpdatedAt
        TestBotRunner.runPeriodicItems(csrIssueBot);
        var csr = issueProject.createIssue("This is a CSR", List.of(), Map.of());
        csr.setState(Issue.State.OPEN);
        csr.setProperty("issuetype", JSON.of("CSR"));
        csr.setProperty("fixVersions", JSON.array().add("17"));
        issue.addLink(Link.create(csr, "csr for").build());
        // Populate the projects repository
        var localRepoFolder = tempFolder.path().resolve("localrepo");
        var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
        var masterHash = localRepo.resolve("master").orElseThrow();
        assertFalse(CheckableRepository.hasBeenEdited(localRepo));
        localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
        // Make a change with a corresponding PR
        var editHash = CheckableRepository.appendAndCommit(localRepo);
        localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
        var pr = credentials.createPullRequest(author, "master", "edit", issue.id() + ": This is an issue");
        TestBotRunner.runPeriodicItems(prBot);
        // Change .jcheck/conf in targetBranch
        localRepo.checkout(masterHash);
        var defaultConf = Files.readString(localRepo.root().resolve(".jcheck/conf"));
        var newConf = defaultConf.replace("version=0.1", "version=17");
        Files.writeString(localRepo.root().resolve(".jcheck/conf"), newConf);
        localRepo.add(localRepo.root().resolve(".jcheck/conf"));
        var confHash = localRepo.commit("Set version as 17", "duke", "duke@openjdk.org");
        localRepo.push(confHash, author.authenticatedUrl(), "master", true);
        // The bot will be able to find the csr although fixVersion in source branch is 0.1
        TestBotRunner.runPeriodicItems(csrIssueBot);
        assertTrue(pr.store().labelNames().contains("csr"));
        // A CSR already exists, so `/csr unneeded` must be rejected with a pointer
        // to withdraw the CSR first.
        reviewer.pullRequest(pr.id()).addComment("/csr unneeded");
        TestBotRunner.runPeriodicItems(prBot);
        assertTrue(pr.store().comments().getLast().body()
                .contains("@user2 The CSR requirement cannot be removed as CSR issues already exist. " +
                        "Please withdraw [TEST-2](http://localhost/project/testTEST-2) and then use the command `/csr unneeded` again."));
    }
}
}
|
apache/helix | 36,366 | helix-core/src/test/java/org/apache/helix/common/ZkTestBase.java | package org.apache.helix.common;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import com.google.common.base.Preconditions;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import org.apache.helix.BaseDataAccessor;
import org.apache.helix.ConfigAccessor;
import org.apache.helix.HelixAdmin;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.HelixManager;
import org.apache.helix.HelixProperty;
import org.apache.helix.PropertyKey;
import org.apache.helix.PropertyKey.Builder;
import org.apache.helix.PropertyPathBuilder;
import org.apache.helix.SystemPropertyKeys;
import org.apache.helix.TestHelper;
import org.apache.helix.api.config.HelixConfigProperty;
import org.apache.helix.constants.InstanceConstants;
import org.apache.helix.controller.pipeline.AbstractAsyncBaseStage;
import org.apache.helix.controller.pipeline.Pipeline;
import org.apache.helix.controller.pipeline.Stage;
import org.apache.helix.controller.pipeline.StageContext;
import org.apache.helix.controller.rebalancer.DelayedAutoRebalancer;
import org.apache.helix.controller.rebalancer.strategy.AutoRebalanceStrategy;
import org.apache.helix.controller.rebalancer.waged.WagedRebalancer;
import org.apache.helix.controller.stages.AttributeName;
import org.apache.helix.controller.stages.ClusterEvent;
import org.apache.helix.integration.manager.MockParticipantManager;
import org.apache.helix.manager.zk.ZKHelixAdmin;
import org.apache.helix.manager.zk.ZKHelixDataAccessor;
import org.apache.helix.manager.zk.ZNRecordSerializer;
import org.apache.helix.manager.zk.ZkBaseDataAccessor;
import org.apache.helix.model.BuiltInStateModelDefinitions;
import org.apache.helix.model.ClusterConfig;
import org.apache.helix.model.CurrentState;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.model.LiveInstance;
import org.apache.helix.model.Message;
import org.apache.helix.model.OnlineOfflineSMD;
import org.apache.helix.model.ResourceConfig;
import org.apache.helix.model.StateModelDefinition;
import org.apache.helix.tools.ClusterSetup;
import org.apache.helix.tools.ClusterStateVerifier;
import org.apache.helix.tools.StateModelConfigGenerator;
import org.apache.helix.zookeeper.api.client.HelixZkClient;
import org.apache.helix.zookeeper.datamodel.ZNRecord;
import org.apache.helix.zookeeper.impl.client.ZkClient;
import org.apache.helix.zookeeper.impl.factory.DedicatedZkClientFactory;
import org.apache.helix.zookeeper.zkclient.ZkServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.ITestContext;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;
public class ZkTestBase {
private static final Logger LOG = LoggerFactory.getLogger(ZkTestBase.class);
// System properties controlling whether, and how many, extra ZooKeepers are started.
private static final String MULTI_ZK_PROPERTY_KEY = "multiZk";
private static final String NUM_ZK_PROPERTY_KEY = "numZk";
// Single-ZK shortcuts; beforeSuite() aliases these to the ZK_ADDR entries of the
// per-address maps below, for backward compatibility with single-ZK tests.
protected static ZkServer _zkServer;
protected static HelixZkClient _gZkClient;
protected static ClusterSetup _gSetupTool;
protected static BaseDataAccessor<ZNRecord> _baseAccessor;
protected static MBeanServerConnection _server = ManagementFactory.getPlatformMBeanServer();
// Per-cluster map of live-instance name -> owning ZK client. NOTE(review): usage
// is not visible in this chunk; presumably tracks fake live instances for cleanup.
private final Map<String, Map<String, HelixZkClient>> _liveInstanceOwners = new HashMap<>();
private static final String ZK_PREFIX = "localhost:";
private static final int ZK_START_PORT = 2183;
// Address of the first (default) in-memory ZooKeeper.
public static final String ZK_ADDR = ZK_PREFIX + ZK_START_PORT;
protected static final String CLUSTER_PREFIX = "CLUSTER";
protected static final String CONTROLLER_CLUSTER_PREFIX = "CONTROLLER_CLUSTER";
protected final String CONTROLLER_PREFIX = "controller";
protected final String PARTICIPANT_PREFIX = "localhost";
private static final long MANUAL_GC_PAUSE = 4000L;
/*
 * Multiple ZK references
 */
// The following maps hold ZK connect string as keys
protected static final Map<String, ZkServer> _zkServerMap = new HashMap<>();
protected static final Map<String, HelixZkClient> _helixZkClientMap = new HashMap<>();
protected static final Map<String, ClusterSetup> _clusterSetupMap = new HashMap<>();
protected static final Map<String, BaseDataAccessor> _baseDataAccessorMap = new HashMap<>();
/**
 * Prints the host's total physical memory (bytes) and the JVM's total/free heap
 * (MB) to stdout, for diagnosing resource-related test behavior.
 */
public static void reportPhysicalMemory() {
    // NOTE(review): getTotalPhysicalMemorySize() is deprecated in newer JDKs in
    // favor of getTotalMemorySize(); kept here for compatibility with older runtimes.
    com.sun.management.OperatingSystemMXBean os = (com.sun.management.OperatingSystemMXBean)
        ManagementFactory.getOperatingSystemMXBean();
    long physicalMemorySize = os.getTotalPhysicalMemorySize();
    System.out.println("************ SYSTEM Physical Memory:" + physicalMemorySize);

    final long MB = 1024 * 1024;
    Runtime runtime = Runtime.getRuntime();
    long free = runtime.freeMemory() / MB;
    long total = runtime.totalMemory() / MB;
    System.out.println("************ total memory:" + total + " free memory:" + free);
}
/**
 * Suite-level setup: quiets java.util.logging, configures ZooKeeper system
 * properties, starts one or more in-memory ZooKeepers (multi-ZK is enabled via
 * the {@code multiZk}/{@code numZk} system properties), aliases the single-ZK
 * static fields, and unregisters any lingering JMX MBeans.
 */
@BeforeSuite
public void beforeSuite() throws Exception {
    // TODO: use logging.properties file to config java.util.logging.Logger levels
    java.util.logging.Logger topJavaLogger = java.util.logging.Logger.getLogger("");
    topJavaLogger.setLevel(Level.WARNING);
    // Due to ZOOKEEPER-2693 fix, we need to specify whitelist for execute zk commends
    System.setProperty("zookeeper.4lw.commands.whitelist", "*");
    System.setProperty(SystemPropertyKeys.CONTROLLER_MESSAGE_PURGE_DELAY, "3000");
    // Start in-memory ZooKeepers
    // If multi-ZooKeeper is enabled, start more ZKs. Otherwise, just set up one ZK
    int numZkToStart = 1;
    String multiZkConfig = System.getProperty(MULTI_ZK_PROPERTY_KEY);
    if (multiZkConfig != null && multiZkConfig.equalsIgnoreCase(Boolean.TRUE.toString())) {
        String numZkFromConfig = System.getProperty(NUM_ZK_PROPERTY_KEY);
        if (numZkFromConfig != null) {
            try {
                // Never start fewer than one ZK, even if the config value is smaller.
                numZkToStart = Math.max(Integer.parseInt(numZkFromConfig), numZkToStart);
            } catch (Exception e) {
                Assert.fail("Failed to parse the number of ZKs from config!");
            }
        } else {
            Assert.fail("multiZk config is set but numZk config is missing!");
        }
    }
    // Start "numZkFromConfigInt" ZooKeepers
    for (int i = 0; i < numZkToStart; i++) {
        startZooKeeper(i);
    }
    // Set the references for backward-compatibility with a single ZK environment
    _zkServer = _zkServerMap.get(ZK_ADDR);
    _gZkClient = _helixZkClientMap.get(ZK_ADDR);
    _gSetupTool = _clusterSetupMap.get(ZK_ADDR);
    _baseAccessor = _baseDataAccessorMap.get(ZK_ADDR);
    // Clean up all JMX objects
    for (ObjectName mbean : _server.queryNames(null, null)) {
        try {
            _server.unregisterMBean(mbean);
        } catch (Exception e) {
            // OK -- best effort: some MBeans cannot be unregistered or are already gone
        }
    }
}
/**
 * Starts (or reuses) an in-memory ZooKeeper server plus its client, ClusterSetup
 * and data accessor for the address derived from the given index. Idempotent per
 * address thanks to computeIfAbsent; synchronized so concurrent suites cannot race.
 * @param i index to be added to the ZK port to avoid conflicts
 */
private static synchronized void startZooKeeper(int i) {
  String zkAddress = ZK_PREFIX + (ZK_START_PORT + i);
  _zkServerMap.computeIfAbsent(zkAddress, ZkTestBase::createZookeeperServer);
  _helixZkClientMap.computeIfAbsent(zkAddress, ZkTestBase::createZkClient);
  _clusterSetupMap.computeIfAbsent(zkAddress, key -> new ClusterSetup(_helixZkClientMap.get(key)));
  _baseDataAccessorMap.computeIfAbsent(zkAddress, key -> new ZkBaseDataAccessor(_helixZkClientMap.get(key)));
}
// Starts an in-memory ZK server at the given address, wrapping any failure in an
// unchecked exception so it can be used as a computeIfAbsent mapping function.
private static ZkServer createZookeeperServer(String zkAddress) {
  try {
    // checkNotNull sits inside the try on purpose: a null server from TestHelper is
    // reported through the same wrapped-exception path as a startup failure.
    return Preconditions.checkNotNull(TestHelper.startZkServer(zkAddress));
  } catch (Exception e) {
    throw new IllegalArgumentException("Failed to start zookeeper server at " + zkAddress, e);
  }
}
// Builds a dedicated (non-shared) HelixZkClient for the given address, configured
// to serialize ZNRecords.
private static HelixZkClient createZkClient(String zkAddress) {
  HelixZkClient.ZkClientConfig config = new HelixZkClient.ZkClientConfig();
  config.setZkSerializer(new ZNRecordSerializer());
  HelixZkClient.ZkConnectionConfig connectionConfig =
      new HelixZkClient.ZkConnectionConfig(zkAddress);
  return DedicatedZkClientFactory.getInstance().buildZkClient(connectionConfig, config);
}
/**
 * Suite-level teardown: unregisters all JMX MBeans, then closes every ZK resource
 * (accessors, setup tools, clients, servers) started by this suite.
 * @throws IOException if querying the MBean server fails
 */
@AfterSuite
public void afterSuite() throws IOException {
  // DRY fix: reuse the shared helper instead of duplicating the unregister loop.
  cleanupJMXObjects();
  synchronized (ZkTestBase.class) {
    // Close in dependency order: accessors and setup tools first, then the clients
    // they wrap, and finally the ZK servers themselves.
    _baseDataAccessorMap.values().forEach(BaseDataAccessor::close);
    _clusterSetupMap.values().forEach(ClusterSetup::close);
    _helixZkClientMap.values().forEach(HelixZkClient::close);
    _zkServerMap.values().forEach(TestHelper::stopZkServer);
  }
}
/**
 * Per-class setup: clears leftover JMX MBeans, then triggers a GC and pauses for
 * MANUAL_GC_PAUSE to let the JVM settle before this class's tests run.
 */
@BeforeClass
public void beforeClass() throws Exception {
  cleanupJMXObjects();
  // Giving each test some time to settle (such as gc pause, etc).
  // Note that this is the best effort we could make to stabilize tests, not a complete solution
  Runtime.getRuntime().gc();
  Thread.sleep(MANUAL_GC_PAUSE);
}
// Records the wall-clock start time on the TestNG context so listeners/reports can
// compute per-test duration.
@BeforeMethod
public void beforeTest(Method testMethod, ITestContext testContext) {
  testContext.setAttribute("StartTime", System.currentTimeMillis());
}
/** Unregisters every currently-registered MBean so metrics do not leak between tests. */
protected void cleanupJMXObjects() throws IOException {
  for (ObjectName mbeanName : _server.queryNames(null, null)) {
    try {
      _server.unregisterMBean(mbeanName);
    } catch (Exception ignored) {
      // Best effort: the MBean may already be gone or refuse unregistration.
    }
  }
}
/** @return the simple (package-less) name of the concrete test class. */
protected String getShortClassName() {
  return getClass().getSimpleName();
}
/**
 * Looks up the current controller leader of the given cluster.
 * @return the leader's instance name, or {@code null} if no leader is elected
 */
protected String getCurrentLeader(HelixZkClient zkClient, String clusterName) {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<>(zkClient));
  LiveInstance leader = accessor.getProperty(accessor.keyBuilder().controllerLeader());
  return leader == null ? null : leader.getInstanceName();
}
/** Sets the cluster-level "persist best possible assignment" flag. */
protected void enablePersistBestPossibleAssignment(HelixZkClient zkClient, String clusterName,
    Boolean enabled) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  ClusterConfig config = accessor.getClusterConfig(clusterName);
  config.setPersistBestPossibleAssignment(enabled);
  accessor.setClusterConfig(clusterName, config);
}
/** Sets the cluster-level "persist intermediate assignment" flag. */
protected void enablePersistIntermediateAssignment(HelixZkClient zkClient, String clusterName,
    Boolean enabled) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  ClusterConfig config = accessor.getClusterConfig(clusterName);
  config.setPersistIntermediateAssignment(enabled);
  accessor.setClusterConfig(clusterName, config);
}
/** Sets the cluster-level topology-aware rebalance flag. */
protected void enableTopologyAwareRebalance(HelixZkClient zkClient, String clusterName,
    Boolean enabled) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  ClusterConfig config = accessor.getClusterConfig(clusterName);
  config.setTopologyAwareEnabled(enabled);
  accessor.setClusterConfig(clusterName, config);
}
/** Sets the cluster-level delayed-rebalance flag (without touching the delay value). */
protected void enableDelayRebalanceInCluster(HelixZkClient zkClient, String clusterName,
    boolean enabled) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  ClusterConfig config = accessor.getClusterConfig(clusterName);
  config.setDelayRebalaceEnabled(enabled);
  accessor.setClusterConfig(clusterName, config);
}
/** Sets the per-instance delayed-rebalance flag. */
protected void enableDelayRebalanceInInstance(HelixZkClient zkClient, String clusterName,
    String instanceName, boolean enabled) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  InstanceConfig config = accessor.getInstanceConfig(clusterName, instanceName);
  config.setDelayRebalanceEnabled(enabled);
  accessor.setInstanceConfig(clusterName, instanceName, config);
}
/** Sets the cluster-level delayed-rebalance flag together with the delay (ms). */
protected void enableDelayRebalanceInCluster(HelixZkClient zkClient, String clusterName,
    boolean enabled, long delay) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  ClusterConfig config = accessor.getClusterConfig(clusterName);
  config.setDelayRebalaceEnabled(enabled);
  config.setRebalanceDelayTime(delay);
  accessor.setClusterConfig(clusterName, config);
}
/**
 * Toggles the cluster-level P2P (participant-to-participant) messaging flag.
 * Enabling writes {@code true}; disabling removes the field entirely (rather than
 * writing {@code false}) so the cluster falls back to the default behavior.
 */
protected void enableP2PInCluster(String clusterName, ConfigAccessor configAccessor,
    boolean enable) {
  // DRY fix: read and write the config once; only the mutation differs per branch.
  ClusterConfig clusterConfig = configAccessor.getClusterConfig(clusterName);
  if (enable) {
    clusterConfig.enableP2PMessage(true);
  } else {
    clusterConfig.getRecord().getSimpleFields()
        .remove(HelixConfigProperty.P2P_MESSAGE_ENABLED.name());
  }
  configAccessor.setClusterConfig(clusterName, clusterConfig);
}
/**
 * Toggles P2P messaging for a single resource: enabling writes a fresh
 * ResourceConfig with the flag set; disabling strips the field from the existing
 * config, if one exists.
 */
protected void enableP2PInResource(String clusterName, ConfigAccessor configAccessor,
    String dbName, boolean enable) {
  if (enable) {
    ResourceConfig enabledConfig =
        new ResourceConfig.Builder(dbName).setP2PMessageEnabled(true).build();
    configAccessor.setResourceConfig(clusterName, dbName, enabledConfig);
    return;
  }
  // Disable: remove the P2P field from the resource config when one exists.
  ResourceConfig existing = configAccessor.getResourceConfig(clusterName, dbName);
  if (existing != null) {
    existing.getRecord().getSimpleFields()
        .remove(HelixConfigProperty.P2P_MESSAGE_ENABLED.name());
    configAccessor.setResourceConfig(clusterName, dbName, existing);
  }
}
/** Sets the cluster-level rebalance delay (ms) without changing the enable flag. */
protected void setDelayTimeInCluster(HelixZkClient zkClient, String clusterName, long delay) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  ClusterConfig config = accessor.getClusterConfig(clusterName);
  config.setRebalanceDelayTime(delay);
  accessor.setClusterConfig(clusterName, config);
}
/** Records the last on-demand rebalance timestamp in the cluster config. */
protected void setLastOnDemandRebalanceTimeInCluster(HelixZkClient zkClient,
    String clusterName, long lastOnDemandTime) {
  ConfigAccessor accessor = new ConfigAccessor(zkClient);
  ClusterConfig config = accessor.getClusterConfig(clusterName);
  config.setLastOnDemandRebalanceTimestamp(lastOnDemandTime);
  accessor.setClusterConfig(clusterName, config);
}
// Convenience overload: delayed-rebalance resource using the default
// AutoRebalanceStrategy.
protected IdealState createResourceWithDelayedRebalance(String clusterName, String db,
    String stateModel, int numPartition, int replica, int minActiveReplica, long delay) {
  return createResourceWithDelayedRebalance(clusterName, db, stateModel, numPartition, replica,
      minActiveReplica, delay, AutoRebalanceStrategy.class.getName());
}
// Delayed-rebalance resource with an explicit rebalance strategy; delegates to the
// shared createResource helper with the DelayedAutoRebalancer.
protected IdealState createResourceWithDelayedRebalance(String clusterName, String db,
    String stateModel, int numPartition, int replica, int minActiveReplica, long delay,
    String rebalanceStrategy) {
  return createResource(clusterName, db, stateModel, numPartition, replica, minActiveReplica,
      delay, DelayedAutoRebalancer.class.getName(), rebalanceStrategy);
}
// WAGED-rebalanced resource; delay -1 means "no delay configured" (createResource
// only applies positive delays), and the strategy is left null for WAGED.
protected IdealState createResourceWithWagedRebalance(String clusterName, String db,
    String stateModel, int numPartition, int replica, int minActiveReplica) {
  return createResource(clusterName, db, stateModel, numPartition, replica, minActiveReplica,
      -1, WagedRebalancer.class.getName(), null);
}
// Shared helper: ensures the resource exists (FULL_AUTO), then configures min-active,
// delay and rebalancer settings on its IdealState and triggers a rebalance.
private IdealState createResource(String clusterName, String db, String stateModel,
    int numPartition, int replica, int minActiveReplica, long delay, String rebalancerClassName,
    String rebalanceStrategy) {
  IdealState idealState =
      _gSetupTool.getClusterManagementTool().getResourceIdealState(clusterName, db);
  if (idealState == null) {
    // Resource does not exist yet: create it in FULL_AUTO mode with the given strategy.
    _gSetupTool.addResourceToCluster(clusterName, db, numPartition, stateModel,
        IdealState.RebalanceMode.FULL_AUTO + "", rebalanceStrategy);
  }
  // Re-read so we mutate the freshly-created (or pre-existing) IdealState.
  idealState = _gSetupTool.getClusterManagementTool().getResourceIdealState(clusterName, db);
  idealState.setMinActiveReplicas(minActiveReplica);
  if (!idealState.isDelayRebalanceEnabled()) {
    idealState.setDelayRebalanceEnabled(true);
  }
  // Non-positive delays (e.g. the WAGED path passes -1) are simply not applied.
  if (delay > 0) {
    idealState.setRebalanceDelay(delay);
  }
  idealState.setRebalancerClassName(rebalancerClassName);
  _gSetupTool.getClusterManagementTool().setResourceIdealState(clusterName, db, idealState);
  _gSetupTool.rebalanceStorageCluster(clusterName, db, replica);
  // Return the post-rebalance IdealState as persisted in ZK.
  idealState = _gSetupTool.getClusterManagementTool().getResourceIdealState(clusterName, db);
  return idealState;
}
// Creates an IdealState for a resource group and seeds a round-robin
// partition->instance ONLINE mapping across the given instances.
protected IdealState createIdealState(String resourceGroupName, String instanceGroupTag,
    List<String> instanceNames, int numPartition, int replica, String rebalanceMode,
    String stateModelDef) {
  IdealState is = _gSetupTool.createIdealStateForResourceGroup(resourceGroupName,
      instanceGroupTag, numPartition, replica, rebalanceMode, stateModelDef);

  // setup initial partition->instance mapping.
  int nodeIdx = 0;
  int numNode = instanceNames.size();
  assert (numNode >= replica);
  for (int i = 0; i < numPartition; i++) {
    String partitionName = resourceGroupName + "_" + i;
    // Each partition takes `replica` consecutive instances, starting one position
    // further along than the previous partition (wrap-around via modulo).
    for (int j = 0; j < replica; j++) {
      is.setPartitionState(partitionName, instanceNames.get((nodeIdx + j) % numNode),
          OnlineOfflineSMD.States.ONLINE.toString());
    }
    nodeIdx++;
  }

  return is;
}
/**
 * Creates a SEMI_AUTO resource in the given cluster and pins every partition to the
 * supplied preference list.
 */
protected void createDBInSemiAuto(ClusterSetup clusterSetup, String clusterName, String dbName,
    List<String> preferenceList, String stateModelDef, int numPartition, int replica) {
  clusterSetup.addResourceToCluster(clusterName, dbName, numPartition, stateModelDef,
      IdealState.RebalanceMode.SEMI_AUTO.toString());
  clusterSetup.rebalanceStorageCluster(clusterName, dbName, replica);

  // Consistency fix: use the supplied ClusterSetup (not the global _gSetupTool) to
  // read the IdealState, so the method works even when the caller's ClusterSetup
  // points at a different ZooKeeper than the default one.
  IdealState is =
      clusterSetup.getClusterManagementTool().getResourceIdealState(clusterName, dbName);
  for (String p : is.getPartitionSet()) {
    is.setPreferenceList(p, preferenceList);
  }
  clusterSetup.getClusterManagementTool().setResourceIdealState(clusterName, dbName, is);
}
/**
 * Validate there should be always minimal active replica and top state replica for each
 * partition.
 * Also make sure there is always some partitions with only active replica count.
 */
protected void validateMinActiveAndTopStateReplica(IdealState is, ExternalView ev,
    int minActiveReplica, int numNodes) {
  StateModelDefinition stateModelDef =
      BuiltInStateModelDefinitions.valueOf(is.getStateModelDefRef()).getStateModelDefinition();
  // Highest-priority state is the "top" state (e.g. MASTER/LEADER/ONLINE).
  String topState = stateModelDef.getStatesPriorityList().get(0);
  // parseInt avoids the needless boxing of Integer.valueOf(...) auto-unboxed to int.
  int replica = Integer.parseInt(is.getReplicas());
  Map<String, Integer> stateCount = stateModelDef.getStateCountMap(numNodes, replica);
  Set<String> activeStates = stateCount.keySet();

  for (String partition : is.getPartitionSet()) {
    Map<String, String> assignmentMap = ev.getRecord().getMapField(partition);
    Assert.assertNotNull(assignmentMap,
        is.getResourceName() + "'s best possible assignment is null for partition " + partition);
    Assert.assertTrue(!assignmentMap.isEmpty(),
        is.getResourceName() + "'s partition " + partition + " has no best possible map in IS.");

    boolean hasTopState = false;
    int activeReplica = 0;
    for (String state : assignmentMap.values()) {
      if (topState.equalsIgnoreCase(state)) {
        hasTopState = true;
      }
      if (activeStates.contains(state)) {
        activeReplica++;
      }
    }

    // (A leftover no-op debug block `if (activeReplica < minActiveReplica) { int a = 0; }`
    // was removed here; the assertion below covers that condition.)
    Assert.assertTrue(hasTopState, String.format("%s missing %s replica", partition, topState));
    Assert.assertTrue(activeReplica >= minActiveReplica,
        String.format("%s has less active replica %d than required %d", partition, activeReplica,
            minActiveReplica));
  }
}
// Runs a single controller pipeline stage against the event after injecting the
// manager attribute. Lifecycle order matters: init -> preProcess -> process/execute
// -> postProcess.
protected void runStage(HelixManager manager, ClusterEvent event, Stage stage) throws Exception {
  event.addAttribute(AttributeName.helixmanager.name(), manager);
  StageContext context = new StageContext();
  stage.init(context);
  stage.preProcess();

  // AbstractAsyncBaseStage will run asynchronously, and it's main logics are implemented in
  // execute() function call
  if (stage instanceof AbstractAsyncBaseStage) {
    ((AbstractAsyncBaseStage) stage).execute(event);
  } else {
    stage.process(event);
  }
  stage.postProcess();
}
/**
 * Asserts that both the instance's config node and its instance subtree exist
 * (or do not exist) in ZK, according to {@code wantExists}.
 */
public void verifyInstance(HelixZkClient zkClient, String clusterName, String instance,
    boolean wantExists) {
  String instanceConfigPath =
      PropertyPathBuilder.instanceConfig(clusterName) + "/" + instance;
  String instancePath = PropertyPathBuilder.instance(clusterName, instance);
  Assert.assertEquals(wantExists, zkClient.exists(instanceConfigPath));
  Assert.assertEquals(wantExists, zkClient.exists(instancePath));
}
/** Asserts the resource's IdealState node existence matches {@code wantExists}. */
public void verifyResource(HelixZkClient zkClient, String clusterName, String resource,
    boolean wantExists) {
  String idealStatePath = PropertyPathBuilder.idealState(clusterName, resource);
  Assert.assertEquals(wantExists, zkClient.exists(idealStatePath));
}
/** Asserts the instance's enabled flag in its InstanceConfig matches {@code wantEnabled}. */
public void verifyEnabled(HelixZkClient zkClient, String clusterName, String instance,
    boolean wantEnabled) {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<ZNRecord>(zkClient));
  InstanceConfig config =
      accessor.getProperty(accessor.keyBuilder().instanceConfig(instance));
  Assert.assertEquals(wantEnabled, config.getInstanceEnabled());
}
/**
 * Asserts each partition carries {@code repl} replicas: preference-list size for
 * SEMI_AUTO resources, instance-state-map size for CUSTOMIZED ones. Other modes are
 * not checked.
 */
public void verifyReplication(HelixZkClient zkClient, String clusterName, String resource,
    int repl) {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<ZNRecord>(zkClient));
  Builder keyBuilder = accessor.keyBuilder();
  IdealState idealState = accessor.getProperty(keyBuilder.idealStates(resource));
  for (String partitionName : idealState.getPartitionSet()) {
    switch (idealState.getRebalanceMode()) {
      case SEMI_AUTO:
        Assert.assertEquals(repl, idealState.getPreferenceList(partitionName).size());
        break;
      case CUSTOMIZED:
        Assert.assertEquals(repl, idealState.getInstanceStateMap(partitionName).size());
        break;
      default:
        break;
    }
  }
}
/**
 * Writes the three built-in state model definitions (MasterSlave, LeaderStandby,
 * OnlineOffline) into the cluster's state model config nodes.
 */
protected void setupStateModel(String clusterName) {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<>(_gZkClient));
  Builder keyBuilder = accessor.keyBuilder();
  StateModelDefinition[] definitions = {
      new StateModelDefinition(StateModelConfigGenerator.generateConfigForMasterSlave()),
      new StateModelDefinition(StateModelConfigGenerator.generateConfigForLeaderStandby()),
      new StateModelDefinition(StateModelConfigGenerator.generateConfigForOnlineOffline())
  };
  for (StateModelDefinition definition : definitions) {
    accessor.setProperty(keyBuilder.stateModelDef(definition.getId()), definition);
  }
}
/**
 * Builds a {@link Message} of the given type carrying the supplied from/to states,
 * resource name and target instance.
 */
protected Message createMessage(Message.MessageType type, String msgId, String fromState,
    String toState, String resourceName, String tgtName) {
  Message message = new Message(type.toString(), msgId);
  message.setFromState(fromState);
  message.setToState(toState);
  message.setTgtName(tgtName);
  message.getRecord().setSimpleField(Message.Attributes.RESOURCE_NAME.toString(), resourceName);
  return message;
}
// Builds and persists a SEMI_AUTO MasterSlave IdealState per resource, with each
// partition's preference list assigned round-robin across "localhost_<n>" nodes.
// Returns the in-memory IdealStates that were written.
protected List<IdealState> setupIdealState(String clusterName, int[] nodes, String[] resources,
    int partitions, int replicas) {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<>(_gZkClient));
  Builder keyBuilder = accessor.keyBuilder();

  List<IdealState> idealStates = new ArrayList<>();
  List<String> instances = new ArrayList<>();
  for (int i : nodes) {
    instances.add("localhost_" + i);
  }

  for (String resourceName : resources) {
    IdealState idealState = new IdealState(resourceName);
    for (int p = 0; p < partitions; p++) {
      // Preference list for partition p: `replicas` nodes starting at offset p,
      // wrapping around the node array.
      List<String> value = new ArrayList<>();
      for (int r = 0; r < replicas; r++) {
        int n = nodes[(p + r) % nodes.length];
        value.add("localhost_" + n);
      }
      idealState.getRecord().setListField(resourceName + "_" + p, value);
    }

    idealState.setReplicas(Integer.toString(replicas));
    idealState.setStateModelDefRef("MasterSlave");
    idealState.setRebalanceMode(IdealState.RebalanceMode.SEMI_AUTO);
    idealState.setNumPartitions(partitions);
    idealStates.add(idealState);

    // System.out.println(idealState);
    accessor.setProperty(keyBuilder.idealStates(resourceName), idealState);
  }
  return idealStates;
}
/** Per-class teardown: closes and clears every per-cluster live-instance ZK client. */
@AfterClass
public void cleanupLiveInstanceOwners() throws InterruptedException {
  System.out.println("AfterClass: " + getShortClassName() + " called.");
  for (Map<String, HelixZkClient> clientMap : _liveInstanceOwners.values()) {
    clientMap.values().forEach(HelixZkClient::close);
    clientMap.clear();
  }
  _liveInstanceOwners.clear();
}
// Creates a LiveInstance node "localhost_<n>" for each entry, each owned by its own
// dedicated ZK client so the ephemeral node survives until that client is closed
// (tracked in _liveInstanceOwners for cleanup). Returns the persisted LiveInstances.
protected List<LiveInstance> setupLiveInstances(String clusterName, int[] liveInstances) {
  HelixZkClient.ZkClientConfig clientConfig = new HelixZkClient.ZkClientConfig();
  clientConfig.setZkSerializer(new ZNRecordSerializer());

  List<LiveInstance> result = new ArrayList<>();

  for (int i = 0; i < liveInstances.length; i++) {
    String instance = "localhost_" + liveInstances[i];

    _liveInstanceOwners.putIfAbsent(clusterName, new HashMap<>());
    Map<String, HelixZkClient> clientMap = _liveInstanceOwners.get(clusterName);
    // One dedicated client per instance; reused if this instance was set up before.
    clientMap.putIfAbsent(instance, DedicatedZkClientFactory.getInstance()
        .buildZkClient(new HelixZkClient.ZkConnectionConfig(ZK_ADDR), clientConfig));
    HelixZkClient client = clientMap.get(instance);

    ZKHelixDataAccessor accessor =
        new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<>(client));
    Builder keyBuilder = accessor.keyBuilder();

    LiveInstance liveInstance = new LiveInstance(instance);
    // Keep setting the session id in the deprecated field for ensure the same behavior as a real participant.
    // Note the participant is doing so for backward compatibility.
    liveInstance.setSessionId(Long.toHexString(client.getSessionId()));
    // Please refer to the version requirement here: helix-core/src/main/resources/cluster-manager-version.properties
    // Ensuring version compatibility can avoid the warning message during test.
    liveInstance.setHelixVersion("0.4");
    accessor.setProperty(keyBuilder.liveInstance(instance), liveInstance);
    result.add(accessor.getProperty(keyBuilder.liveInstance(instance)));
  }
  return result;
}
/**
 * Removes every live-instance node in the cluster and closes/releases the dedicated
 * ZK client that owned each ephemeral node (when created via setupLiveInstances).
 */
protected void deleteLiveInstances(String clusterName) {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<>(_gZkClient));
  Builder keyBuilder = accessor.keyBuilder();

  Map<String, HelixZkClient> clientMap =
      _liveInstanceOwners.getOrDefault(clusterName, Collections.emptyMap());

  for (String liveInstance : accessor.getChildNames(keyBuilder.liveInstances())) {
    // Fix: reuse the accessor built above instead of constructing a new, identical
    // ZKHelixDataAccessor on every loop iteration.
    accessor.removeProperty(keyBuilder.liveInstance(liveInstance));

    HelixZkClient client = clientMap.remove(liveInstance);
    if (client != null) {
      client.close();
    }
  }

  if (clientMap.isEmpty()) {
    _liveInstanceOwners.remove(clusterName);
  }
}
/** Registers an enabled "localhost_&lt;port&gt;" InstanceConfig for each given port. */
protected void setupInstances(String clusterName, int[] instances) {
  HelixAdmin admin = new ZKHelixAdmin(_gZkClient);
  for (int port : instances) {
    String instanceName = "localhost_" + port;
    InstanceConfig instanceConfig = new InstanceConfig(instanceName);
    instanceConfig.setHostName("localhost");
    instanceConfig.setPort(Integer.toString(port));
    instanceConfig.setInstanceOperation(InstanceConstants.InstanceOperation.ENABLE);
    admin.addInstance(clusterName, instanceConfig);
  }
}
/**
 * Runs the pipeline against the event. When {@code shouldThrowException} is false,
 * failures are logged and swallowed so subsequent pipelines can run; otherwise the
 * exception propagates to the caller.
 */
protected void runPipeline(ClusterEvent event, Pipeline pipeline, boolean shouldThrowException)
    throws Exception {
  try {
    pipeline.handle(event);
    pipeline.finish();
  } catch (Exception e) {
    if (!shouldThrowException) {
      LOG.error("Exception while executing pipeline: {}. Will not continue to next pipeline",
          pipeline, e);
    } else {
      throw e;
    }
  }
}
// Runs a single pipeline stage against the event (no manager injection). Lifecycle
// order matters: init -> preProcess -> process/execute -> postProcess.
protected void runStage(ClusterEvent event, Stage stage) throws Exception {
  StageContext context = new StageContext();
  stage.init(context);
  stage.preProcess();

  // AbstractAsyncBaseStage will run asynchronously, and it's main logics are implemented in
  // execute() function call
  // TODO (harry): duplicated code in ZkIntegrationTestBase, consider moving runStage()
  // to a shared library
  if (stage instanceof AbstractAsyncBaseStage) {
    ((AbstractAsyncBaseStage) stage).execute(event);
  } else {
    stage.process(event);
  }
  stage.postProcess();
}
// Drops the cluster and all of its ZK state using the shared client and setup tool.
protected void deleteCluster(String clusterName) {
  TestHelper.dropCluster(clusterName, _gZkClient, _gSetupTool);
}
/**
 * Poll for the existence (or lack thereof) of a specific Helix property.
 * @param clazz the HelixProperty subclass
 * @param accessor connected HelixDataAccessor
 * @param key the property key to look up
 * @param shouldExist true if the property should exist, false otherwise
 * @return the property if found, or null if it does not exist
 */
protected <T extends HelixProperty> T pollForProperty(Class<T> clazz, HelixDataAccessor accessor,
    PropertyKey key, boolean shouldExist) throws InterruptedException {
  final int POLL_TIMEOUT = 5000;
  final int POLL_INTERVAL = 50;
  T property = accessor.getProperty(key);
  // Keep polling while the observed existence disagrees with the expectation
  // ((property != null) != shouldExist) and the timeout has not elapsed.
  for (int timeWaited = 0;
      timeWaited < POLL_TIMEOUT && (property != null) != shouldExist;
      timeWaited += POLL_INTERVAL) {
    Thread.sleep(POLL_INTERVAL);
    property = accessor.getProperty(key);
  }
  return property;
}
/**
 * Ensures that external view and current state are empty
 */
protected static class EmptyZkVerifier implements ClusterStateVerifier.ZkVerifier {
  private final String _clusterName;
  private final String _resourceName;
  private final HelixZkClient _zkClient;

  /**
   * Instantiate the verifier
   * @param clusterName the cluster to verify
   * @param resourceName the resource to verify
   */
  public EmptyZkVerifier(String clusterName, String resourceName) {
    _clusterName = clusterName;
    _resourceName = resourceName;
    _zkClient = DedicatedZkClientFactory.getInstance()
        .buildZkClient(new HelixZkClient.ZkConnectionConfig(ZK_ADDR));
    _zkClient.setZkSerializer(new ZNRecordSerializer());
  }

  @Override
  public boolean verify() {
    BaseDataAccessor<ZNRecord> baseAccessor = new ZkBaseDataAccessor<ZNRecord>(_zkClient);
    HelixDataAccessor accessor = new ZKHelixDataAccessor(_clusterName, baseAccessor);
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    ExternalView externalView = accessor.getProperty(keyBuilder.externalView(_resourceName));

    // verify external view empty
    if (externalView != null) {
      for (String partition : externalView.getPartitionSet()) {
        Map<String, String> stateMap = externalView.getStateMap(partition);
        if (stateMap != null && !stateMap.isEmpty()) {
          LOG.error("External view not empty for " + partition);
          return false;
        }
      }
    }

    // verify current state empty
    List<String> liveParticipants = accessor.getChildNames(keyBuilder.liveInstances());
    for (String participant : liveParticipants) {
      List<String> sessionIds = accessor.getChildNames(keyBuilder.sessions(participant));
      for (String sessionId : sessionIds) {
        CurrentState currentState =
            accessor.getProperty(keyBuilder.currentState(participant, sessionId, _resourceName));
        // Fix: getProperty returns null when no current state exists for this
        // session/resource — that IS the empty (success) case, so guard the NPE.
        Map<String, String> partitionStateMap =
            currentState == null ? null : currentState.getPartitionStateMap();
        if (partitionStateMap != null && !partitionStateMap.isEmpty()) {
          LOG.error("Current state not empty for " + participant);
          return false;
        }
      }

      List<String> taskSessionIds =
          accessor.getChildNames(keyBuilder.taskCurrentStateSessions(participant));
      for (String sessionId : taskSessionIds) {
        CurrentState taskCurrentState = accessor
            .getProperty(keyBuilder.taskCurrentState(participant, sessionId, _resourceName));
        // Same null-safety for task current states.
        Map<String, String> taskPartitionStateMap =
            taskCurrentState == null ? null : taskCurrentState.getPartitionStateMap();
        if (taskPartitionStateMap != null && !taskPartitionStateMap.isEmpty()) {
          LOG.error("Task current state not empty for " + participant);
          return false;
        }
      }
    }
    return true;
  }

  @Override
  public ZkClient getZkClient() {
    return (ZkClient) _zkClient;
  }

  @Override
  public String getClusterName() {
    return _clusterName;
  }
}
/** Adds the instance to the cluster and starts a mock participant for it. */
public MockParticipantManager addParticipant(String cluster, String instanceName) {
  _gSetupTool.addInstanceToCluster(cluster, instanceName);
  MockParticipantManager participant =
      new MockParticipantManager(ZK_ADDR, cluster, instanceName);
  participant.syncStart();
  return participant;
}
/**
 * Stops the participant and drops its instance from the cluster. Failures are
 * logged, not thrown, so teardown code can call this unconditionally.
 */
public void dropParticipant(String cluster, MockParticipantManager participant) {
  if (participant == null) {
    return;
  }
  try {
    participant.syncStop();
    String instanceName = participant.getInstanceName();
    InstanceConfig instanceConfig =
        _gSetupTool.getClusterManagementTool().getInstanceConfig(cluster, instanceName);
    _gSetupTool.getClusterManagementTool().dropInstance(cluster, instanceConfig);
  } catch (Exception e) {
    LOG.warn("Error dropping participant " + participant.getInstanceName(), e);
  }
}
}
|
googleapis/google-cloud-java | 36,098 | java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/ListOperationsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1;
/**
*
*
* <pre>
* ListOperationsRequest lists operations.
* </pre>
*
* Protobuf type {@code google.container.v1.ListOperationsRequest}
*/
public final class ListOperationsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1.ListOperationsRequest)
ListOperationsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListOperationsRequest.newBuilder() to construct.
private ListOperationsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListOperationsRequest() {
projectId_ = "";
zone_ = "";
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListOperationsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.container.v1.ClusterServiceProto
.internal_static_google_container_v1_ListOperationsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.container.v1.ClusterServiceProto
.internal_static_google_container_v1_ListOperationsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.container.v1.ListOperationsRequest.class,
com.google.container.v1.ListOperationsRequest.Builder.class);
}
public static final int PROJECT_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object projectId_ = "";
/**
*
*
* <pre>
* Deprecated. The Google Developers Console [project ID or project
* number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
* This field has been deprecated and replaced by the parent field.
* </pre>
*
* <code>string project_id = 1 [deprecated = true];</code>
*
* @deprecated google.container.v1.ListOperationsRequest.project_id is deprecated. See
* google/container/v1/cluster_service.proto;l=4151
* @return The projectId.
*/
@java.lang.Override
@java.lang.Deprecated
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Deprecated. The Google Developers Console [project ID or project
* number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
* This field has been deprecated and replaced by the parent field.
* </pre>
*
* <code>string project_id = 1 [deprecated = true];</code>
*
* @deprecated google.container.v1.ListOperationsRequest.project_id is deprecated. See
* google/container/v1/cluster_service.proto;l=4151
* @return The bytes for projectId.
*/
@java.lang.Override
@java.lang.Deprecated
public com.google.protobuf.ByteString getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ZONE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object zone_ = "";
/**
*
*
* <pre>
* Deprecated. The name of the Google Compute Engine
* [zone](https://cloud.google.com/compute/docs/zones#available)
* to return operations for, or `-` for all zones. This field has been
* deprecated and replaced by the parent field.
* </pre>
*
* <code>string zone = 2 [deprecated = true];</code>
*
* @deprecated google.container.v1.ListOperationsRequest.zone is deprecated. See
* google/container/v1/cluster_service.proto;l=4157
* @return The zone.
*/
@java.lang.Override
@java.lang.Deprecated
public java.lang.String getZone() {
java.lang.Object ref = zone_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
zone_ = s;
return s;
}
}
/**
*
*
* <pre>
* Deprecated. The name of the Google Compute Engine
* [zone](https://cloud.google.com/compute/docs/zones#available)
* to return operations for, or `-` for all zones. This field has been
* deprecated and replaced by the parent field.
* </pre>
*
* <code>string zone = 2 [deprecated = true];</code>
*
* @deprecated google.container.v1.ListOperationsRequest.zone is deprecated. See
* google/container/v1/cluster_service.proto;l=4157
* @return The bytes for zone.
*/
@java.lang.Override
@java.lang.Deprecated
public com.google.protobuf.ByteString getZoneBytes() {
java.lang.Object ref = zone_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
zone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int PARENT_FIELD_NUMBER = 4;
  // Holds either a java.lang.String or a ByteString; see the zone_ field for the caching scheme.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   * The parent (project and location) where the operations will be listed.
   * Specified in the format {@code projects/&#42;/locations/&#42;}.
   * Location {@code -} matches all zones and all regions.
   *
   * <code>string parent = 4;</code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once from the wire representation and memoize the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   * Same field as {@link #getParent()}, returned as UTF-8 bytes.
   *
   * <code>string parent = 4;</code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  /** Always initialized: this message has no required fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to the wire format. Fields holding the proto3 default
   * (empty string) are skipped entirely, matching {@link #getSerializedSize()} below.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, parent_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}) the exact number of bytes
   * {@link #writeTo} will emit; counts the same non-default fields writeTo writes.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, parent_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Value equality over all three string fields plus the unknown-field set. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1.ListOperationsRequest)) {
      return super.equals(obj);
    }
    com.google.container.v1.ListOperationsRequest other =
        (com.google.container.v1.ListOperationsRequest) obj;
    if (!getProjectId().equals(other.getProjectId())) return false;
    if (!getZone().equals(other.getZone())) return false;
    if (!getParent().equals(other.getParent())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash over the same fields {@link #equals} compares. Memoized; 0 is used as the
   * "not yet computed" sentinel, so a value that genuinely hashes to 0 is recomputed
   * each call (standard protoc-generated behavior, harmless).
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    hash = (37 * hash) + ZONE_FIELD_NUMBER;
    hash = (53 * hash) + getZone().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Standard protoc-generated parse entry points. All delegate to PARSER; the
  // stream-based overloads additionally translate IOExceptions via the
  // GeneratedMessageV3 helpers. ---
  public static com.google.container.v1.ListOperationsRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.container.v1.ListOperationsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.container.v1.ListOperationsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1.ListOperationsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Returns a fresh builder with all fields at their defaults. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(com.google.container.v1.ListOperationsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message's fields.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* ListOperationsRequest lists operations.
* </pre>
*
* Protobuf type {@code google.container.v1.ListOperationsRequest}
*/
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1.ListOperationsRequest)
      com.google.container.v1.ListOperationsRequestOrBuilder {
    // Descriptor shared with the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_ListOperationsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_ListOperationsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1.ListOperationsRequest.class,
              com.google.container.v1.ListOperationsRequest.Builder.class);
    }
    // Construct using com.google.container.v1.ListOperationsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets every field to its default and clears all has-bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      projectId_ = "";
      zone_ = "";
      parent_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_ListOperationsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.container.v1.ListOperationsRequest getDefaultInstanceForType() {
      return com.google.container.v1.ListOperationsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.container.v1.ListOperationsRequest build() {
      com.google.container.v1.ListOperationsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.container.v1.ListOperationsRequest buildPartial() {
      com.google.container.v1.ListOperationsRequest result =
          new com.google.container.v1.ListOperationsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose has-bit is set into the freshly built message.
    private void buildPartial0(com.google.container.v1.ListOperationsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.projectId_ = projectId_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.zone_ = zone_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.parent_ = parent_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1.ListOperationsRequest) {
        return mergeFrom((com.google.container.v1.ListOperationsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: non-empty string fields in `other` overwrite this builder's values.
    public Builder mergeFrom(com.google.container.v1.ListOperationsRequest other) {
      if (other == com.google.container.v1.ListOperationsRequest.getDefaultInstance()) return this;
      if (!other.getProjectId().isEmpty()) {
        projectId_ = other.projectId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getZone().isEmpty()) {
        zone_ = other.zone_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop. Tags 10/18/34 are fields 1/2/4 (wire type 2, length-delimited);
    // unrecognized tags are preserved in the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                projectId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                zone_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 34:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object projectId_ = "";
    /**
     * Deprecated. The Google Developers Console
     * [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
     * Replaced by the {@code parent} field.
     *
     * <code>string project_id = 1 [deprecated = true];</code>
     *
     * @deprecated Use {@link #getParent()} instead.
     * @return The projectId.
     */
    @java.lang.Deprecated
    public java.lang.String getProjectId() {
      java.lang.Object ref = projectId_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the stored ByteString once and memoize the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        projectId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Same field as {@link #getProjectId()}, as UTF-8 bytes.
     *
     * <code>string project_id = 1 [deprecated = true];</code>
     *
     * @deprecated Use {@link #getParentBytes()} instead.
     * @return The bytes for projectId.
     */
    @java.lang.Deprecated
    public com.google.protobuf.ByteString getProjectIdBytes() {
      java.lang.Object ref = projectId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        projectId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets the deprecated {@code project_id} field (field 1).
     *
     * @deprecated Use {@link #setParent(java.lang.String)} instead.
     * @param value The projectId to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setProjectId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      projectId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Clears the deprecated {@code project_id} field back to its default.
     *
     * @deprecated Use {@link #clearParent()} instead.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearProjectId() {
      projectId_ = getDefaultInstance().getProjectId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * Sets the deprecated {@code project_id} field from UTF-8 bytes (validated).
     *
     * @deprecated Use {@link #setParentBytes(com.google.protobuf.ByteString)} instead.
     * @param value The bytes for projectId to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setProjectIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      projectId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object zone_ = "";
    /**
     * Deprecated. The name of the Google Compute Engine
     * [zone](https://cloud.google.com/compute/docs/zones#available)
     * to return operations for, or {@code -} for all zones. Replaced by the {@code parent} field.
     *
     * <code>string zone = 2 [deprecated = true];</code>
     *
     * @deprecated Use {@link #getParent()} instead.
     * @return The zone.
     */
    @java.lang.Deprecated
    public java.lang.String getZone() {
      java.lang.Object ref = zone_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        zone_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Same field as {@link #getZone()}, as UTF-8 bytes.
     *
     * <code>string zone = 2 [deprecated = true];</code>
     *
     * @deprecated Use {@link #getParentBytes()} instead.
     * @return The bytes for zone.
     */
    @java.lang.Deprecated
    public com.google.protobuf.ByteString getZoneBytes() {
      java.lang.Object ref = zone_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        zone_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets the deprecated {@code zone} field (field 2).
     *
     * @deprecated Use {@link #setParent(java.lang.String)} instead.
     * @param value The zone to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setZone(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      zone_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Clears the deprecated {@code zone} field back to its default.
     *
     * @deprecated Use {@link #clearParent()} instead.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearZone() {
      zone_ = getDefaultInstance().getZone();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * Sets the deprecated {@code zone} field from UTF-8 bytes (validated).
     *
     * @deprecated Use {@link #setParentBytes(com.google.protobuf.ByteString)} instead.
     * @param value The bytes for zone to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setZoneBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      zone_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object parent_ = "";
    /**
     * The parent (project and location) where the operations will be listed.
     * Specified in the format {@code projects/&#42;/locations/&#42;}.
     * Location {@code -} matches all zones and all regions.
     *
     * <code>string parent = 4;</code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Same field as {@link #getParent()}, as UTF-8 bytes.
     *
     * <code>string parent = 4;</code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets the {@code parent} field (field 4).
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * Clears the {@code parent} field back to its default.
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * Sets the {@code parent} field from UTF-8 bytes (validated).
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.container.v1.ListOperationsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.container.v1.ListOperationsRequest)
  // Canonical all-defaults singleton; also the identity checked by toBuilder()/mergeFrom().
  private static final com.google.container.v1.ListOperationsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.container.v1.ListOperationsRequest();
  }
  public static com.google.container.v1.ListOperationsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser backing all static parseFrom overloads; delegates to Builder.mergeFrom and
  // attaches the partially parsed message to any parse failure.
  private static final com.google.protobuf.Parser<ListOperationsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListOperationsRequest>() {
        @java.lang.Override
        public ListOperationsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListOperationsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListOperationsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.container.v1.ListOperationsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// NOTE(review): everything below this line belongs to a DIFFERENT file
// (apache/freemarker: freemarker-core/src/test/java/freemarker/core/DefaultTruncateBuiltinAlgorithmTest.java).
// The pipe-separated row that was here was a dataset-concatenation artifact, not Java source.
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.core;
import static freemarker.core.DefaultTruncateBuiltinAlgorithm.*;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import org.junit.Test;
import freemarker.template.Configuration;
import freemarker.template.SimpleNumber;
import freemarker.template.SimpleScalar;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
import freemarker.template.TemplateScalarModel;
public class DefaultTruncateBuiltinAlgorithmTest {
    // Fixture algorithms, one per terminator configuration under test.
    // NOTE(review): the boolean constructor argument presumably toggles keeping a space
    // before the terminator at word boundaries — the *_NO_W_SPACE_* naming suggests so;
    // confirm against the DefaultTruncateBuiltinAlgorithm constructor.
    private static final DefaultTruncateBuiltinAlgorithm EMPTY_TERMINATOR_INSTANCE =
            new DefaultTruncateBuiltinAlgorithm("", false);
    private static final DefaultTruncateBuiltinAlgorithm DOTS_INSTANCE =
            new DefaultTruncateBuiltinAlgorithm("...", true);
    private static final DefaultTruncateBuiltinAlgorithm DOTS_NO_W_SPACE_INSTANCE =
            new DefaultTruncateBuiltinAlgorithm("...", false);
    private static final DefaultTruncateBuiltinAlgorithm ASCII_NO_W_SPACE_INSTANCE =
            new DefaultTruncateBuiltinAlgorithm("[...]", false);
    // Instance with a markup (HTML) terminator in addition to the plain-text one; built in a
    // static initializer because fromMarkup(...) declares TemplateModelException.
    private static final DefaultTruncateBuiltinAlgorithm M_TERM_INSTANCE;
    static {
        try {
            M_TERM_INSTANCE = new DefaultTruncateBuiltinAlgorithm(
                    "...", null, true,
                    HTMLOutputFormat.INSTANCE.fromMarkup("<r>...</r>"), null, true,
                    true, 0.75);
        } catch (TemplateModelException e) {
            throw new RuntimeException(e);
        }
    }
@Test
public void testConstructorIllegalArguments() throws TemplateException {
try {
new DefaultTruncateBuiltinAlgorithm(
null, null, true,
HTMLOutputFormat.INSTANCE.fromMarkup("<r>...</r>"), null, true,
true, 0.75);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("defaultTerminator"));
}
}
@Test
public void testTruncateIllegalArguments() throws TemplateException {
Environment env = createEnvironment();
ASCII_INSTANCE.truncate("", 0, new SimpleScalar("."), 1, env);
try {
ASCII_INSTANCE.truncate("", -1, new SimpleScalar("."), 1, env);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("maxLength"));
}
try {
ASCII_INSTANCE.truncateM("sss", 2, new SimpleNumber(1), 1, env);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("SimpleNumber"));
}
try {
ASCII_INSTANCE.truncate("sss", 2, new SimpleScalar("."), -1, env);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("terminatorLength"));
}
}
private Environment createEnvironment() {
try {
return new Template("", "", new Configuration(Configuration.VERSION_2_3_28)).createProcessingEnvironment(null,
null);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Test
public void testCSimple() {
assertC(ASCII_INSTANCE, "12345678", 9, "12345678");
assertC(ASCII_INSTANCE, "12345678", 8, "12345678");
assertC(ASCII_INSTANCE, "12345678", 7, "12[...]");
assertC(ASCII_INSTANCE, "12345678", 6, "1[...]");
for (int maxLength = 5; maxLength >= 0; maxLength--) {
assertC(ASCII_INSTANCE, "12345678", maxLength, "[...]");
}
assertC(UNICODE_INSTANCE, "12345678", 9, "12345678");
assertC(UNICODE_INSTANCE, "12345678", 8, "12345678");
assertC(UNICODE_INSTANCE, "12345678", 7, "1234[\u2026]");
assertC(UNICODE_INSTANCE, "12345678", 6, "123[\u2026]");
assertC(UNICODE_INSTANCE, "12345678", 5, "12[\u2026]");
assertC(UNICODE_INSTANCE, "12345678", 4, "1[\u2026]");
for (int maxLength = 3; maxLength >= 0; maxLength--) {
assertC(UNICODE_INSTANCE, "12345678", maxLength, "[\u2026]");
}
assertC(EMPTY_TERMINATOR_INSTANCE, "12345678", 9, "12345678");
for (int length = 8; length >= 0; length--) {
assertC(EMPTY_TERMINATOR_INSTANCE, "12345678", length, "12345678".substring(0, length));
}
}
@Test
public void testCSpaceAndDot() {
assertC(ASCII_INSTANCE, "123456 ", 9, "123456 ");
assertC(ASCII_INSTANCE, "123456 ", 8, "123456 ");
assertC(ASCII_INSTANCE, "123456 ", 7, "12[...]");
assertC(ASCII_INSTANCE, "123456 ", 6, "1[...]");
assertC(ASCII_INSTANCE, "123456 ", 5, "[...]");
assertC(ASCII_INSTANCE, "123456 ", 4, "[...]");
assertC(ASCII_INSTANCE, "1 345 ", 13, "1 345 ");
assertC(ASCII_INSTANCE, "1 345 ", 12, "1 345 [...]"); // Not "1 345 [...]"
assertC(ASCII_INSTANCE, "1 345 ", 11, "1 345 [...]");
assertC(ASCII_INSTANCE, "1 345 ", 10, "1 34[...]"); // Not "12345[...]"
assertC(ASCII_INSTANCE, "1 345 ", 9, "1 34[...]");
assertC(ASCII_INSTANCE, "1 345 ", 8, "1 3[...]");
assertC(ASCII_INSTANCE, "1 345 ", 7, "1 [...]");
assertC(ASCII_INSTANCE, "1 345 ", 6, "[...]"); // Not "1[...]"
assertC(ASCII_INSTANCE, "1 345 ", 5, "[...]");
assertC(ASCII_INSTANCE, "1 345 ", 4, "[...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 13, "1 345 ");
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 12, "1 345[...]"); // Differs!
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 11, "1 345[...]"); // Differs!
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 10, "1 345[...]"); // Differs!
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 9, "1 34[...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 8, "1 3[...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 7, "1[...]"); // Differs!
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 6, "1[...]"); // Differs!
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 5, "[...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 345 ", 4, "[...]");
assertC(ASCII_INSTANCE, "1 4567890", 9, "1 4[...]");
assertC(ASCII_INSTANCE, "1 4567890", 8, "1 [...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 4567890", 9, "1 4[...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, "1 4567890", 8, "1[...]");
assertC(ASCII_INSTANCE, " 3456789", 9, " 3456789");
assertC(ASCII_INSTANCE, " 3456789", 8, " 3[...]");
assertC(ASCII_INSTANCE, " 3456789", 7, "[...]");
assertC(ASCII_INSTANCE, " 3456789", 6, "[...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, " 3456789", 8, " 3[...]");
assertC(ASCII_NO_W_SPACE_INSTANCE, " 3456789", 7, "[...]");
// Dots aren't treated specially by default:
assertC(ASCII_INSTANCE, "1. 56...012345", 15, "1. 56...012345");
assertC(ASCII_INSTANCE, "1. 56...012345", 14, "1. 56...[...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 13, "1. 56..[...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 12, "1. 56.[...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 11, "1. 56[...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 10, "1. 5[...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 9, "1. [...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 8, "1. [...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 7, "1[...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 6, "1[...]");
assertC(ASCII_INSTANCE, "1. 56...012345", 5, "[...]");
// Dots are treated specially here:
assertC(DOTS_INSTANCE, "1. 56...012345", 15, "1. 56...012345");
assertC(DOTS_INSTANCE, "1. 56...012345", 14, "1. 56...01...");
assertC(DOTS_INSTANCE, "1. 56...012345", 13, "1. 56...0...");
assertC(DOTS_INSTANCE, "1. 56...012345", 12, "1. 56...");
assertC(DOTS_INSTANCE, "1. 56...012345", 11, "1. 56...");
assertC(DOTS_INSTANCE, "1. 56...012345", 10, "1. 56...");
assertC(DOTS_INSTANCE, "1. 56...012345", 9, "1. 56...");
assertC(DOTS_INSTANCE, "1. 56...012345", 8, "1. 5...");
assertC(DOTS_INSTANCE, "1. 56...012345", 7, "1. ...");
assertC(DOTS_INSTANCE, "1. 56...012345", 6, "1. ...");
assertC(DOTS_INSTANCE, "1. 56...012345", 5, "1...");
assertC(DOTS_INSTANCE, "1. 56...012345", 4, "1...");
assertC(DOTS_INSTANCE, "1. 56...012345", 3, "...");
assertC(DOTS_INSTANCE, "1. 56...012345", 2, "...");
assertC(DOTS_INSTANCE, "1. 56...012345", 1, "...");
assertC(DOTS_INSTANCE, "1. 56...012345", 0, "...");
assertC(DOTS_NO_W_SPACE_INSTANCE, "1. 56...012345", 8, "1. 5...");
assertC(DOTS_NO_W_SPACE_INSTANCE, "1. 56...012345", 7, "1...");
assertC(DOTS_NO_W_SPACE_INSTANCE, "1. 56...012345", 6, "1...");
assertC(DOTS_NO_W_SPACE_INSTANCE, "1. 56...012345", 5, "1...");
assertC(DOTS_NO_W_SPACE_INSTANCE, "1. 56...012345", 4, "1...");
assertC(DOTS_NO_W_SPACE_INSTANCE, "1. 56...012345", 3, "...");
assertC(EMPTY_TERMINATOR_INSTANCE, "ab. cd", 6, "ab. cd");
assertC(EMPTY_TERMINATOR_INSTANCE, "ab. cd", 5, "ab. c");
assertC(EMPTY_TERMINATOR_INSTANCE, "ab. cd", 4, "ab.");
assertC(EMPTY_TERMINATOR_INSTANCE, "ab. cd", 3, "ab.");
assertC(EMPTY_TERMINATOR_INSTANCE, "ab. cd", 2, "ab");
assertC(EMPTY_TERMINATOR_INSTANCE, "ab. cd", 1, "a");
assertC(EMPTY_TERMINATOR_INSTANCE, "ab. cd", 0, "");
}
@Test
public void testWSimple() {
// Word-boundary ("W") truncation: the cut falls on the last word boundary that still lets
// the kept content plus the terminator fit into maxLength.
assertW(ASCII_INSTANCE, "word1 word2 word3", 18, "word1 word2 word3");
assertW(ASCII_INSTANCE, "word1 word2 word3", 17, "word1 word2 word3");
assertW(ASCII_INSTANCE, "word1 word2 word3", 16, "word1 [...]");
assertW(ASCII_INSTANCE, "word1 word2 word3", 11, "word1 [...]");
// Below 11 even "word1 " + "[...]" no longer fits, so only the terminator remains.
for (int maxLength = 10; maxLength >= 0; maxLength--) {
assertW(ASCII_INSTANCE, "word1 word2 word3", maxLength, "[...]");
}
// Same input with the shorter Unicode terminator (bracketed ellipsis, 3 characters), so the
// thresholds shift compared to the 5-character ASCII terminator.
assertW(UNICODE_INSTANCE, "word1 word2 word3", 18, "word1 word2 word3");
assertW(UNICODE_INSTANCE, "word1 word2 word3", 17, "word1 word2 word3");
assertW(UNICODE_INSTANCE, "word1 word2 word3", 16, "word1 word2 [\u2026]");
assertW(UNICODE_INSTANCE, "word1 word2 word3", 15, "word1 word2 [\u2026]");
assertW(UNICODE_INSTANCE, "word1 word2 word3", 14, "word1 [\u2026]");
assertW(UNICODE_INSTANCE, "word1 word2 word3", 9, "word1 [\u2026]");
for (int maxLength = 8; maxLength >= 0; maxLength--) {
assertW(UNICODE_INSTANCE, "word1 word2 word3", maxLength, "[\u2026]");
}
// With an empty terminator the result is simply the longest whole-word prefix that fits.
assertW(EMPTY_TERMINATOR_INSTANCE, "word1 word2 word3", 18, "word1 word2 word3");
assertW(EMPTY_TERMINATOR_INSTANCE, "word1 word2 word3", 17, "word1 word2 word3");
assertW(EMPTY_TERMINATOR_INSTANCE, "word1 word2 word3", 16, "word1 word2");
assertW(EMPTY_TERMINATOR_INSTANCE, "word1 word2 word3", 11, "word1 word2");
assertW(EMPTY_TERMINATOR_INSTANCE, "word1 word2 word3", 10, "word1");
assertW(EMPTY_TERMINATOR_INSTANCE, "word1 word2 word3", 5, "word1");
for (int maxLength = 4; maxLength >= 0; maxLength--) {
assertW(EMPTY_TERMINATOR_INSTANCE, "word1 word2 word3", maxLength, "");
}
}
@Test
public void testWSpaceAndDot() {
// Word-boundary truncation around whitespace and dots, with a dot-like ("...") terminator.
// DOTS_INSTANCE keeps the space before the terminator; the *_NO_W_SPACE variant drops it.
assertW(DOTS_INSTANCE, " word1 word2 ", 16, " word1 word2 ");
assertW(DOTS_INSTANCE, " word1 word2 ", 15, " word1 ...");
assertW(DOTS_INSTANCE, " word1 word2 ", 11, " word1 ...");
for (int maxLength = 10; maxLength >= 0; maxLength--) {
assertW(DOTS_INSTANCE, " word1 word2 ", maxLength, "...");
}
assertW(DOTS_NO_W_SPACE_INSTANCE, " word1 word2 ", 16, " word1 word2 ");
assertW(DOTS_NO_W_SPACE_INSTANCE, " word1 word2 ", 15, " word1...");
assertW(DOTS_NO_W_SPACE_INSTANCE, " word1 word2 ", 10, " word1...");
for (int maxLength = 9; maxLength >= 0; maxLength--) {
assertW(DOTS_NO_W_SPACE_INSTANCE, " word1 word2 ", maxLength, "...");
}
// Inputs where dots themselves form "words"; the dot terminator must not pile up on
// existing trailing dots.
assertW(DOTS_INSTANCE, " . . word1.. word2 ", 23, " . . word1.. word2 ");
assertW(DOTS_INSTANCE, " . . word1.. word2 ", 22, " . . word1.. ...");
assertW(DOTS_INSTANCE, " . . word1.. word2 ", 16, " . . word1.. ...");
assertW(DOTS_INSTANCE, " . . word1.. word2 ", 15, " . . ...");
assertW(DOTS_INSTANCE, " . . word1.. word2 ", 8, " . . ...");
assertW(DOTS_INSTANCE, " . . word1.. word2 ", 7, " . ...");
assertW(DOTS_INSTANCE, " . . word1.. word2 ", 6, " . ...");
for (int maxLength = 5; maxLength >= 0; maxLength--) {
assertW(DOTS_INSTANCE, " . . word1.. word2 ", maxLength, "...");
}
assertW(DOTS_NO_W_SPACE_INSTANCE, " . . word1.. word2 ", 23, " . . word1.. word2 ");
assertW(DOTS_NO_W_SPACE_INSTANCE, " . . word1.. word2 ", 22, " . . word1.. word2...");
assertW(DOTS_NO_W_SPACE_INSTANCE, " . . word1.. word2 ", 21, " . . word1...");
for (int maxLength = 13; maxLength >= 0; maxLength--) {
assertW(DOTS_NO_W_SPACE_INSTANCE, " . . word1.. word2 ", maxLength, "...");
}
}
/**
 * "Auto" means a plain truncate(...) call, because the tested implementation chooses between
 * character-boundary (CB) and word-boundary (WB) truncation automatically.
 */
@Test
public void testAuto() {
// Automatic mode: the algorithm picks word-boundary truncation when a word boundary lies
// close enough to the cut point, otherwise falls back to character-boundary truncation.
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 24, "1 234567 90ABCDEFGHIJKL");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 23, "1 234567 90ABCDEFGHIJKL");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 21, "1 234567 90ABCDE[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 20, "1 234567 90ABCD[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 19, "1 234567 90ABC[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 18, "1 234567 [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 17, "1 234567 [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 16, "1 234567 [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 15, "1 234567 [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 14, "1 234567 [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 13, "1 23456[...]"); // wb space
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJKL", 12, "1 23456[...]");
// Sliding the space through the input: word-boundary mode is chosen only while the boundary
// stays near the cut point.
assertAuto(ASCII_INSTANCE, "1 234567 0ABCDEFGHIJKL", 22, "1 234567 0ABCDEF[...]");
assertAuto(ASCII_INSTANCE, "1 234567 9 ABCDEFGHIJKL", 22, "1 234567 9 ABCDEF[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90 BCDEFGHIJKL", 22, "1 234567 90 [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90A CDEFGHIJKL", 22, "1 234567 90A [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90AB DEFGHIJKL", 22, "1 234567 90AB [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABC EFGHIJKL", 22, "1 234567 90ABC [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCD FGHIJKL", 22, "1 234567 90ABCD [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDE GHIJKL", 22, "1 234567 90ABCDE [...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEF HIJKL", 22, "1 234567 90ABCDE[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFG IJKL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGH JKL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHI KL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJ L", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_INSTANCE, "1 234567 90ABCDEFGHIJK ", 22, "1 234567 90ABCDEF[...]");
// Same series without keeping the space before the terminator:
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 0ABCDEFGHIJKL", 22, "1 234567 0ABCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 9 ABCDEFGHIJKL", 22, "1 234567 9 ABCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90 BCDEFGHIJKL", 22, "1 234567 90 BCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90A CDEFGHIJKL", 22, "1 234567 90A[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90AB DEFGHIJKL", 22, "1 234567 90AB[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABC EFGHIJKL", 22, "1 234567 90ABC[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCD FGHIJKL", 22, "1 234567 90ABCD[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCDE GHIJKL", 22, "1 234567 90ABCDE[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCDEF HIJKL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCDEFG IJKL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCDEFGH JKL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCDEFGHI KL", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCDEFGHIJ L", 22, "1 234567 90ABCDEF[...]");
assertAuto(ASCII_NO_W_SPACE_INSTANCE, "1 234567 90ABCDEFGHIJK ", 22, "1 234567 90ABCDEF[...]");
// Dot-aware terminator: trailing dots of the kept content merge with the "..." terminator.
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 24, "12390ABCD.. . EFGHIJK .");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 23, "12390ABCD.. . ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 22, "12390ABCD.. . ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 21, "12390ABCD.. . ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 20, "12390ABCD.. . ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 19, "12390ABCD.. . ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 18, "12390ABCD.. . ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 17, "12390ABCD.. ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 16, "12390ABCD.. ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 15, "12390ABCD.. ...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 14, "12390ABCD...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 13, "12390ABCD...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 12, "12390ABCD...");
assertAuto(DOTS_INSTANCE, "12390ABCD.. . EFGHIJK .", 11, "12390ABC...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 27, "word0 word1. word2 w3 . ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 26, "word0 word1. word2 w3 ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 25, "word0 word1. word2 w3 ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 24, "word0 word1. word2 ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 22, "word0 word1. word2 ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 21, "word0 word1. ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 16, "word0 word1. ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 15, "word0 word1...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 14, "word0 word1...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 13, "word0 word...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 12, "word0 ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 9, "word0 ...");
assertAuto(DOTS_INSTANCE, "word0 word1. word2 w3 . . w4", 8, "word...");
}
@Test
public void testExtremeWordBoundaryMinLengths() {
// Reference behavior of the default instance for the same input:
assertC(ASCII_INSTANCE, "1 3456789", 8, "1 3[...]");
assertW(ASCII_INSTANCE, "1 3456789", 8, "1 [...]");
// With the ratio set to 1.0, word-boundary truncation is effectively never chosen in auto
// mode, so the character-boundary result is produced.
// NOTE(review): the last two constructor arguments are presumably
// (fallbackToCharacterBoundary, wordBoundaryMinLength ratio) -- confirm against the
// DefaultTruncateBuiltinAlgorithm constructor.
DefaultTruncateBuiltinAlgorithm wbMinLen1Algo = new DefaultTruncateBuiltinAlgorithm(
ASCII_INSTANCE.getDefaultTerminator(), null, null,
null, null, null,
true, 1.0);
assertAuto(wbMinLen1Algo, "1 3456789", 8, "1 3[...]");
assertAuto(ASCII_INSTANCE, "123456789", 8, "123[...]");
// With the ratio set to 0.0, even a zero-length "word" prefix counts as acceptable, so the
// result collapses to the bare terminator.
DefaultTruncateBuiltinAlgorithm wbMinLen0Algo = new DefaultTruncateBuiltinAlgorithm(
ASCII_INSTANCE.getDefaultTerminator(), null, null,
null, null, null,
true, 0.0);
assertAuto(wbMinLen0Algo, "123456789", 8, "[...]");
}
@Test
public void testSimpleEdgeCases() throws TemplateException {
Environment env = createEnvironment();
// Exercises the markup-output entry points (truncateM / truncateCM / truncateWM) of every
// standard instance on degenerate inputs: empty string, and maxLength 0.
for (final DefaultTruncateBuiltinAlgorithm alg : new DefaultTruncateBuiltinAlgorithm[] {
ASCII_INSTANCE, UNICODE_INSTANCE,
EMPTY_TERMINATOR_INSTANCE, DOTS_INSTANCE, ASCII_NO_W_SPACE_INSTANCE, M_TERM_INSTANCE }) {
for (TruncateCaller tc : new TruncateCaller[] {
new TruncateCaller() {
public TemplateModel truncate(String s, int maxLength, TemplateModel terminator,
Integer terminatorLength, Environment env) throws
TemplateException {
return alg.truncateM(s, maxLength, terminator, terminatorLength, env);
}
},
new TruncateCaller() {
public TemplateModel truncate(String s, int maxLength, TemplateModel terminator,
Integer terminatorLength, Environment env) throws
TemplateException {
return alg.truncateCM(s, maxLength, terminator, terminatorLength, env);
}
},
new TruncateCaller() {
public TemplateModel truncate(String s, int maxLength, TemplateModel terminator,
Integer terminatorLength, Environment env) throws
TemplateException {
return alg.truncateWM(s, maxLength, terminator, terminatorLength, env);
}
}
}) {
// Empty input truncated to 0 stays empty.
// NOTE(review): the next two lines are identical; possibly one of them was meant to
// use a different input -- confirm against upstream history.
assertEquals("", tc.truncate("", 0, null, null, env).toString(), "");
assertEquals("", tc.truncate("", 0, null, null, env).toString(), "");
// Non-empty input truncated to 0 yields exactly the default terminator; markup
// terminators are returned as the very same markup-output object.
if (alg.getDefaultMTerminator() != null) {
TemplateModel truncated = tc.truncate("x", 0, null, null, env);
assertThat(truncated, instanceOf(TemplateMarkupOutputModel.class));
assertSame(alg.getDefaultMTerminator(), truncated);
} else {
TemplateModel truncated = tc.truncate("x", 0, null, null, env);
assertThat(truncated, instanceOf(TemplateScalarModel.class));
assertEquals(alg.getDefaultTerminator(), ((TemplateScalarModel) truncated).getAsString());
}
// Ad-hoc terminators (plain string or HTML markup) are returned as-is at maxLength 0.
SimpleScalar stringTerminator = new SimpleScalar("|");
assertSame(stringTerminator, tc.truncate("x", 0, stringTerminator, null, env));
TemplateHTMLOutputModel htmlTerminator = HTMLOutputFormat.INSTANCE.fromMarkup("<x>.</x>");
assertSame(htmlTerminator, tc.truncate("x", 0, htmlTerminator, null, env));
}
}
}
@Test
public void testStandardInstanceSettings() {
Environment env = createEnvironment();
// ASCII instance, plain-text output: 5-character "[...]" terminator.
assertEquals(
"123[...]",
ASCII_INSTANCE.truncate("1234567890", 8, null, null, env)
.getAsString());
// ASCII instance, markup output: the HTML terminator's visible length (3) is what counts
// toward maxLength, not the full markup length.
assertEquals(
"12345<span class='truncateTerminator'>[…]</span>",
HTMLOutputFormat.INSTANCE.getMarkupString(
((TemplateHTMLOutputModel) ASCII_INSTANCE
.truncateM("1234567890", 8, null, null, env))
));
// Unicode instance, plain-text output: 3-character bracketed-ellipsis terminator.
assertEquals(
"12345[\u2026]",
UNICODE_INSTANCE.truncate("1234567890", 8, null, null, env)
.getAsString());
// Unicode instance shares the same markup terminator as the ASCII instance.
assertEquals(
"12345<span class='truncateTerminator'>[…]</span>",
HTMLOutputFormat.INSTANCE.getMarkupString(
((TemplateHTMLOutputModel) UNICODE_INSTANCE
.truncateM("1234567890", 8, null, null, env))
));
}
/**
 * Asserts that character-boundary truncation of {@code in} to {@code maxLength} (with the
 * algorithm's default terminator) yields {@code expected}.
 */
private void assertC(TruncateBuiltinAlgorithm algorithm, String in, int maxLength, String expected) {
    final TemplateScalarModel result;
    try {
        result = algorithm.truncateC(in, maxLength, null, null, null);
    } catch (TemplateException e) {
        // Truncation of these inputs is not expected to fail; surface it as a test error.
        throw new RuntimeException(e);
    }
    assertEquals(expected, result.getAsString());
}
/**
 * Asserts that word-boundary truncation of {@code in} to {@code maxLength} (with the
 * algorithm's default terminator) yields {@code expected}.
 */
private void assertW(TruncateBuiltinAlgorithm algorithm, String in, int maxLength, String expected) {
    final TemplateScalarModel result;
    try {
        result = algorithm.truncateW(in, maxLength, null, null, null);
    } catch (TemplateException e) {
        // Truncation of these inputs is not expected to fail; surface it as a test error.
        throw new RuntimeException(e);
    }
    assertEquals(expected, result.getAsString());
}
/**
 * Asserts that plain {@code truncate(...)} (automatic character/word-boundary choice) of
 * {@code in} to {@code maxLength} yields {@code expected}.
 */
private void assertAuto(TruncateBuiltinAlgorithm algorithm, String in, int maxLength, String expected) {
    final TemplateScalarModel result;
    try {
        result = algorithm.truncate(in, maxLength, null, null, null);
    } catch (TemplateException e) {
        // Truncation of these inputs is not expected to fail; surface it as a test error.
        throw new RuntimeException(e);
    }
    assertEquals(expected, result.getAsString());
}
/**
 * Abstracts over the truncateM/truncateCM/truncateWM entry points so edge-case tests can run
 * the same assertions against each of them.
 */
interface TruncateCaller {
TemplateModel truncate(
String s, int maxLength,
TemplateModel terminator, Integer terminatorLength,
Environment env) throws TemplateException;
}
@Test
public void testGetLengthWithoutTags() {
// getLengthWithoutTags counts the visible characters of HTML/XML markup: tags and comments
// contribute 0, CDATA section content is counted, and each character reference counts as a
// single character.
assertEquals(0, getLengthWithoutTags(""));
assertEquals(1, getLengthWithoutTags("a"));
assertEquals(2, getLengthWithoutTags("ab"));
assertEquals(0, getLengthWithoutTags("<tag>"));
assertEquals(1, getLengthWithoutTags("<tag>a"));
assertEquals(2, getLengthWithoutTags("<tag>a</tag>b"));
assertEquals(4, getLengthWithoutTags("ab<tag>cd</tag>"));
assertEquals(2, getLengthWithoutTags("ab<tag></tag>"));
// Character references count as one visible character each:
assertEquals(2, getLengthWithoutTags("&chr;a"));
assertEquals(4, getLengthWithoutTags("&chr;a&chr;b"));
assertEquals(6, getLengthWithoutTags("ab&chr;cd&chr;"));
assertEquals(4, getLengthWithoutTags("ab&chr;&chr;"));
assertEquals(4, getLengthWithoutTags("ab<tag>&chr;</tag>&chr;"));
// Comments contribute nothing:
assertEquals(2, getLengthWithoutTags("<!--c-->ab"));
assertEquals(2, getLengthWithoutTags("a<!--c-->b<!--c-->"));
assertEquals(2, getLengthWithoutTags("a<!-->--><!---->b"));
// CDATA sections: only the content between the markers is counted.
assertEquals(3, getLengthWithoutTags("a<![CDATA[b]]>c"));
assertEquals(2, getLengthWithoutTags("a<![CDATA[]]>b"));
assertEquals(0, getLengthWithoutTags("<![CDATA[]]>"));
assertEquals(3, getLengthWithoutTags("<![CDATA[123"));
assertEquals(4, getLengthWithoutTags("<![CDATA[123]"));
assertEquals(5, getLengthWithoutTags("<![CDATA[123]]"));
assertEquals(3, getLengthWithoutTags("<![CDATA[123]]>"));
// Malformed / unterminated constructs must not crash: unclosed tags and comments are
// dropped, an unterminated reference still counts as one character, and a lone "&" is
// treated as a literal character.
assertEquals(2, getLengthWithoutTags("ab<!--"));
assertEquals(2, getLengthWithoutTags("ab<tag"));
assertEquals(3, getLengthWithoutTags("ab&chr"));
assertEquals(2, getLengthWithoutTags("ab<!-"));
assertEquals(2, getLengthWithoutTags("ab<"));
assertEquals(3, getLengthWithoutTags("ab&"));
assertEquals(3, getLengthWithoutTags("a&;c"));
}
@Test
public void testGetCodeFromNumericalCharReferenceName() {
// Parses the name part of a numerical character reference: "#n" is decimal, "#xn"/"#Xn" is
// hexadecimal (case-insensitive digits); leading zeros are allowed.
assertEquals(0, getCodeFromNumericalCharReferenceName("#0"));
assertEquals(0, getCodeFromNumericalCharReferenceName("#00"));
assertEquals(0, getCodeFromNumericalCharReferenceName("#x0"));
assertEquals(0, getCodeFromNumericalCharReferenceName("#x00"));
assertEquals(1, getCodeFromNumericalCharReferenceName("#1"));
assertEquals(1, getCodeFromNumericalCharReferenceName("#01"));
assertEquals(1, getCodeFromNumericalCharReferenceName("#x1"));
assertEquals(1, getCodeFromNumericalCharReferenceName("#x01"));
assertEquals(1, getCodeFromNumericalCharReferenceName("#X1"));
assertEquals(1, getCodeFromNumericalCharReferenceName("#X01"));
assertEquals(123409, getCodeFromNumericalCharReferenceName("#123409"));
assertEquals(123409, getCodeFromNumericalCharReferenceName("#00123409"));
assertEquals(0x123A0F, getCodeFromNumericalCharReferenceName("#x123A0F"));
assertEquals(0x123A0F, getCodeFromNumericalCharReferenceName("#x123a0f"));
assertEquals(0x123A0F, getCodeFromNumericalCharReferenceName("#X00123A0f"));
// Invalid digits yield -1 instead of throwing:
assertEquals(-1, getCodeFromNumericalCharReferenceName("#x1G"));
assertEquals(-1, getCodeFromNumericalCharReferenceName("#1A"));
}
@Test
public void testIsDotCharReference() {
// References rendering as a dot or ellipsis: "#46"/"#x2E" are '.', "#x2026" is the
// horizontal ellipsis, plus the named references "hellip" and "period".
assertTrue(isDotCharReference("#46"));
assertTrue(isDotCharReference("#x2E"));
assertTrue(isDotCharReference("#x2026"));
assertTrue(isDotCharReference("hellip"));
assertTrue(isDotCharReference("period"));
// Non-dot names, and decimal/hex confusion ("#x46" is 'F'), must be rejected:
assertFalse(isDotCharReference(""));
assertFalse(isDotCharReference("foo"));
assertFalse(isDotCharReference("#x46"));
assertFalse(isDotCharReference("#boo"));
}
@Test
public void testIsHtmlOrXmlStartsWithDot() {
// True when the first visible character of the markup (tags and comments skipped) is a dot
// or the Unicode ellipsis.
assertTrue(doesHtmlOrXmlStartWithDot("."));
assertTrue(doesHtmlOrXmlStartWithDot(".etc"));
assertTrue(doesHtmlOrXmlStartWithDot("…"));
assertTrue(doesHtmlOrXmlStartWithDot("<tag x='y'/>…"));
assertTrue(doesHtmlOrXmlStartWithDot("<span class='t'>...</span>"));
assertTrue(doesHtmlOrXmlStartWithDot("<span class='t'>…</span>"));
assertTrue(doesHtmlOrXmlStartWithDot("<span class='t'>.</span>"));
assertTrue(doesHtmlOrXmlStartWithDot("<foo><!-- -->.etc"));
// False when the visible text starts with anything else ("&46;" is not recognized as a
// dot reference -- the "#" is missing):
assertFalse(doesHtmlOrXmlStartWithDot(""));
assertFalse(doesHtmlOrXmlStartWithDot("[...]"));
assertFalse(doesHtmlOrXmlStartWithDot("etc."));
assertFalse(doesHtmlOrXmlStartWithDot("<span class='t'>[...]</span>"));
assertFalse(doesHtmlOrXmlStartWithDot("<span class='t'>etc.</span>"));
assertFalse(doesHtmlOrXmlStartWithDot("<span class='t'>&46;</span>"));
}
@Test
public void testTruncateAdhocHtmlTerminator() throws TemplateException {
Environment env = createEnvironment();
// Ad-hoc HTML terminators passed to truncateM; the result is markup output, not a string.
TemplateHTMLOutputModel htmlEllipsis = HTMLOutputFormat.INSTANCE.fromMarkup("<i>…</i>");
TemplateHTMLOutputModel htmlSquEllipsis = HTMLOutputFormat.INSTANCE.fromMarkup("<i>[…]</i>");
// Length detection
// The terminator's visible (tag-stripped) length is detected automatically: 1 for the bare
// ellipsis, 3 for "[...]"-style.
{
TemplateModel actual = ASCII_INSTANCE.truncateM("abcd", 3, htmlEllipsis, null, env);
assertThat(actual, instanceOf(TemplateHTMLOutputModel.class));
assertEquals(
"ab<i>…</i>",
HTMLOutputFormat.INSTANCE.getMarkupString((TemplateHTMLOutputModel) actual));
}
{
TemplateModel actual = ASCII_INSTANCE.truncateM("abcdef", 5, htmlSquEllipsis, null, env);
assertThat(actual, instanceOf(TemplateHTMLOutputModel.class));
assertEquals(
"ab<i>[…]</i>",
HTMLOutputFormat.INSTANCE.getMarkupString((TemplateHTMLOutputModel) actual));
}
// An explicit terminatorLength overrides the detected one (here: pretend it is 1 wide).
{
TemplateModel actual = ASCII_INSTANCE.truncateM("abcdef", 5, htmlSquEllipsis, 1, env);
assertThat(actual, instanceOf(TemplateHTMLOutputModel.class));
assertEquals(
"abcd<i>[…]</i>",
HTMLOutputFormat.INSTANCE.getMarkupString((TemplateHTMLOutputModel) actual));
}
// Dot removal
// A trailing dot of the kept content is removed when the terminator starts with a dot-like
// character; with the bracketed form the dot is kept.
{
TemplateModel actual = ASCII_INSTANCE.truncateM("a.cd", 3, htmlEllipsis, null, env);
assertThat(actual, instanceOf(TemplateHTMLOutputModel.class));
assertEquals(
"a<i>…</i>",
HTMLOutputFormat.INSTANCE.getMarkupString((TemplateHTMLOutputModel) actual));
}
{
TemplateModel actual = ASCII_INSTANCE.truncateM("a.cdef", 5, htmlSquEllipsis, null, env);
assertThat(actual, instanceOf(TemplateHTMLOutputModel.class));
assertEquals(
"a.<i>[…]</i>",
HTMLOutputFormat.INSTANCE.getMarkupString((TemplateHTMLOutputModel) actual));
}
}
@Test
public void testTruncateAdhocPlainTextTerminator() throws TemplateException {
Environment env = createEnvironment();
// Plain-text counterpart of the HTML-terminator test above.
TemplateScalarModel ellipsis = new SimpleScalar("\u2026");
TemplateScalarModel squEllipsis = new SimpleScalar("[\u2026]");
// Length detection
// The terminator's own string length counts toward maxLength (1 vs 3 here).
{
TemplateScalarModel actual = ASCII_INSTANCE.truncate("abcd", 3, ellipsis, null, env);
assertEquals("ab\u2026", actual.getAsString());
}
{
TemplateScalarModel actual = ASCII_INSTANCE.truncate("abcdef", 5, squEllipsis, null, env);
assertEquals("ab[\u2026]", actual.getAsString());
}
// An explicit terminatorLength overrides the detected one.
{
TemplateScalarModel actual = ASCII_INSTANCE.truncate("abcdef", 5, squEllipsis, 1, env);
assertEquals("abcd[\u2026]", actual.getAsString());
}
// Dot removal
// A trailing dot is dropped before a dot-like terminator; kept before the bracketed form.
{
TemplateScalarModel actual = ASCII_INSTANCE.truncate("a.cd", 3, ellipsis, null, env);
assertEquals("a\u2026", actual.getAsString());
}
{
TemplateScalarModel actual = ASCII_INSTANCE.truncate("a.cdef", 5, squEllipsis, null, env);
assertEquals("a.[\u2026]", actual.getAsString());
}
}
} |
apache/paimon | 35,819 | paimon-flink/paimon-flink-common/src/test/java/org/apache/paimon/flink/PartialUpdateITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.paimon.flink;
import org.apache.paimon.utils.BlockingIterator;
import org.apache.paimon.utils.CommonTestUtils;
import org.apache.flink.configuration.RestartStrategyOptions;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.planner.factories.TestValuesTableFactory;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
import org.apache.flink.util.CloseableIterator;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.fail;
/** ITCase for partial update. */
public class PartialUpdateITCase extends CatalogITCaseBase {
/**
 * Tables used by the tests: {@code T} is a plain partial-update table; {@code dwd_orders}
 * additionally ignores deletes; {@code ods_orders} and {@code dim_persons} are
 * changelog-producing sources feeding the streaming join test.
 */
@Override
protected List<String> ddl() {
return Arrays.asList(
"CREATE TABLE IF NOT EXISTS T ("
+ "j INT, k INT, a INT, b INT, c STRING, PRIMARY KEY (j,k) NOT ENFORCED)"
+ " WITH ('merge-engine'='partial-update');",
"CREATE TABLE IF NOT EXISTS dwd_orders ("
+ "OrderID INT, OrderNumber INT, PersonID INT, LastName STRING, FirstName STRING, Age INT, PRIMARY KEY (OrderID) NOT ENFORCED)"
+ " WITH ('merge-engine'='partial-update', 'ignore-delete'='true');",
"CREATE TABLE IF NOT EXISTS ods_orders (OrderID INT, OrderNumber INT, PersonID INT, PRIMARY KEY (OrderID) NOT ENFORCED) WITH ('changelog-producer'='input', 'continuous.discovery-interval'='1s');",
"CREATE TABLE IF NOT EXISTS dim_persons (PersonID INT, LastName STRING, FirstName STRING, Age INT, PRIMARY KEY (PersonID) NOT ENFORCED) WITH ('changelog-producer'='input', 'continuous.discovery-interval'='1s');");
}
/** Partial rows written within a single batch are merged in memory before being flushed. */
@Test
public void testMergeInMemory() {
    // Two rows for key (1, 2): the non-null fields of the second row fill the gaps of the first.
    batchSql(
            "INSERT INTO T VALUES "
                    + "(1, 2, 3, CAST(NULL AS INT), '5'), "
                    + "(1, 2, CAST(NULL AS INT), 6, CAST(NULL AS STRING))");
    assertThat(batchSql("SELECT * FROM T")).containsExactlyInAnyOrder(Row.of(1, 2, 3, 6, "5"));
}
@Test
public void testMergeRead() {
// Three partial writes (separate commits) to the same key (1, 2): non-null values overwrite
// the stored ones, nulls leave them untouched; the merge happens at read time.
batchSql("INSERT INTO T VALUES (1, 2, 3, CAST(NULL AS INT), CAST(NULL AS STRING))");
batchSql("INSERT INTO T VALUES (1, 2, 4, 5, CAST(NULL AS STRING))");
batchSql("INSERT INTO T VALUES (1, 2, 4, CAST(NULL AS INT), '6')");
assertThat(batchSql("SELECT * FROM T")).containsExactlyInAnyOrder(Row.of(1, 2, 4, 5, "6"));
// projection
assertThat(batchSql("SELECT a FROM T")).containsExactlyInAnyOrder(Row.of(4));
// filter
assertThat(batchSql("SELECT * FROM T where b = 5 and c = '6'"))
.containsExactlyInAnyOrder(Row.of(1, 2, 4, 5, "6"));
}
@Test
public void testMergeCompaction() {
// Wait compaction
// Forcing a compaction on every commit makes the merge happen in the compaction path
// instead of only at read time.
batchSql("ALTER TABLE T SET ('commit.force-compact'='true')");
// key 1 2
batchSql("INSERT INTO T VALUES (1, 2, 3, CAST(NULL AS INT), CAST(NULL AS STRING))");
batchSql("INSERT INTO T VALUES (1, 2, 4, 5, CAST(NULL AS STRING))");
batchSql("INSERT INTO T VALUES (1, 2, 4, CAST(NULL AS INT), '6')");
// key 1 3
batchSql("INSERT INTO T VALUES (1, 3, CAST(NULL AS INT), 1, '1')");
batchSql("INSERT INTO T VALUES (1, 3, 2, 3, CAST(NULL AS STRING))");
batchSql("INSERT INTO T VALUES (1, 3, CAST(NULL AS INT), 4, CAST(NULL AS STRING))");
// Each key merges to the last non-null value per column.
assertThat(batchSql("SELECT * FROM T"))
.containsExactlyInAnyOrder(Row.of(1, 2, 4, 5, "6"), Row.of(1, 3, 2, 4, "1"));
}
@Test
public void testForeignKeyJoin() throws Exception {
// Enriches ods_orders with dim_persons columns via a streaming job that writes two partial
// column sets (order columns / person columns) into the partial-update table dwd_orders.
// NOTE(review): presumably NONE is needed so the sink receives the raw changelog instead of
// materialized upserts -- confirm.
sEnv.getConfig()
.set(
ExecutionConfigOptions.TABLE_EXEC_SINK_UPSERT_MATERIALIZE,
ExecutionConfigOptions.UpsertMaterialize.NONE);
CloseableIterator<Row> iter =
streamSqlIter(
"INSERT INTO dwd_orders "
+ "SELECT OrderID, OrderNumber, PersonID, CAST(NULL AS STRING), CAST(NULL AS STRING), CAST(NULL AS INT) FROM ods_orders "
+ "UNION ALL "
+ "SELECT OrderID, CAST(NULL AS INT), dim_persons.PersonID, LastName, FirstName, Age FROM dim_persons JOIN ods_orders ON dim_persons.PersonID = ods_orders.PersonID;");
batchSql("INSERT INTO ods_orders VALUES (1, 2, 3)");
batchSql("INSERT INTO dim_persons VALUES (3, 'snow', 'jon', 23)");
// The streaming job commits asynchronously, so poll until the merged row is visible.
CommonTestUtils.waitUtil(
() ->
rowsToList(batchSql("SELECT * FROM dwd_orders"))
.contains(Arrays.asList(1, 2, 3, "snow", "jon", 23)),
Duration.ofSeconds(5),
Duration.ofMillis(200));
// Updates on both sides must be reflected in the merged row as well.
batchSql("INSERT INTO ods_orders VALUES (1, 4, 3)");
batchSql("INSERT INTO dim_persons VALUES (3, 'snow', 'targaryen', 23)");
CommonTestUtils.waitUtil(
() ->
rowsToList(batchSql("SELECT * FROM dwd_orders"))
.contains(Arrays.asList(1, 4, 3, "snow", "targaryen", 23)),
Duration.ofSeconds(5),
Duration.ofMillis(200));
iter.close();
}
/** Converts each {@link Row} into a plain list of its field values. */
protected List<List<Object>> rowsToList(List<Row> rows) {
    List<List<Object>> converted = new ArrayList<>(rows.size());
    for (Row row : rows) {
        converted.add(toList(row));
    }
    return converted;
}
/**
 * Extracts the field values of a row, asserting on the way that the row is an insert-like
 * change (no retractions are expected by the callers).
 */
private List<Object> toList(Row row) {
    assertThat(row.getKind()).isIn(RowKind.INSERT, RowKind.UPDATE_AFTER);
    int arity = row.getArity();
    List<Object> fields = new ArrayList<>(arity);
    for (int pos = 0; pos < arity; pos++) {
        fields.add(row.getField(pos));
    }
    return fields;
}
/**
 * Streaming (continuous) reading of a partial-update table without a changelog producer must
 * fail. The original code passed the expected message as the second argument of
 * {@code assertThatThrownBy(callable, description)}, where it is only an assertion
 * <em>description</em> and is never matched against the thrown exception, so the test passed
 * for any failure. Assert on the failure text explicitly instead.
 */
@Test
public void testStreamingRead() {
    assertThatThrownBy(() -> sEnv.from("T").execute().print())
            // The engine's message may be wrapped in several layers of Flink exceptions, so
            // check the whole stack trace rather than only the top-level message.
            // NOTE(review): confirm this is the engine's exact wording.
            .hasStackTraceContaining("Partial update continuous reading is not supported");
}
@Test
public void testStreamingReadChangelogInput() throws TimeoutException {
// With 'changelog-producer'='input', the partial-update table CAN be read continuously:
// each input row is emitted as-is (no merge applied on the stream).
sql(
"CREATE TABLE INPUT_T ("
+ "a INT, b INT, c INT, PRIMARY KEY (a) NOT ENFORCED)"
+ " WITH ('merge-engine'='partial-update', 'changelog-producer'='input');");
BlockingIterator<Row, Row> iterator =
BlockingIterator.of(streamSqlIter("SELECT * FROM INPUT_T"));
sql("INSERT INTO INPUT_T VALUES (1, CAST(NULL AS INT), 1)");
assertThat(iterator.collect(1)).containsExactlyInAnyOrder(Row.of(1, null, 1));
// The second write for key 1 is streamed with its own nulls, not merged with the first.
sql("INSERT INTO INPUT_T VALUES (1, 1, CAST(NULL AS INT)), (2, 2, 2)");
assertThat(iterator.collect(2))
.containsExactlyInAnyOrder(Row.of(1, 1, null), Row.of(2, 2, 2));
}
@Test
public void testSequenceGroup() {
// Sequence groups: g_1 orders updates of columns a,b and g_2 orders updates of c,d. A write
// only takes effect for a group when it advances that group's sequence value; a null
// sequence value never updates its group.
sql(
"CREATE TABLE SG ("
+ "k INT, a INT, b INT, g_1 INT, c INT, d INT, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
+ " WITH ("
+ "'merge-engine'='partial-update', "
+ "'fields.g_1.sequence-group'='a,b', "
+ "'fields.g_2.sequence-group'='c,d');");
sql("INSERT INTO SG VALUES (1, 1, 1, 1, 1, 1, 1)");
// g_2 should not be updated
sql("INSERT INTO SG VALUES (1, 2, 2, 2, 2, 2, CAST(NULL AS INT))");
// select *
assertThat(sql("SELECT * FROM SG")).containsExactlyInAnyOrder(Row.of(1, 2, 2, 2, 1, 1, 1));
// projection
assertThat(sql("SELECT c, d FROM SG")).containsExactlyInAnyOrder(Row.of(1, 1));
// g_1 should not be updated
sql("INSERT INTO SG VALUES (1, 3, 3, 1, 3, 3, 3)");
assertThat(sql("SELECT * FROM SG")).containsExactlyInAnyOrder(Row.of(1, 2, 2, 2, 3, 3, 3));
// d should be updated by null
// When the sequence value DOES advance, even nulls overwrite (no default agg function).
sql("INSERT INTO SG VALUES (1, 3, 3, 3, 2, 2, CAST(NULL AS INT))");
sql("INSERT INTO SG VALUES (1, 4, 4, 4, 2, 2, CAST(NULL AS INT))");
sql("INSERT INTO SG VALUES (1, 5, 5, 3, 5, CAST(NULL AS INT), 4)");
assertThat(sql("SELECT a, b FROM SG")).containsExactlyInAnyOrder(Row.of(4, 4));
assertThat(sql("SELECT c, d FROM SG")).containsExactlyInAnyOrder(Row.of(5, null));
}
@Test
public void testMultiFieldsSequenceGroup() {
// Same as testSequenceGroup, but the second group is ordered by the composite sequence
// (g_2, g_3): an update applies only when the pair advances lexicographically.
sql(
"CREATE TABLE SG ("
+ "k INT, a INT, b INT, g_1 INT, c INT, d INT, g_2 INT, g_3 INT, PRIMARY KEY (k) NOT ENFORCED)"
+ " WITH ("
+ "'merge-engine'='partial-update', "
+ "'fields.g_1.sequence-group'='a,b', "
+ "'fields.g_2,g_3.sequence-group'='c,d');");
sql("INSERT INTO SG VALUES (1, 1, 1, 1, 1, 1, 1, 1)");
// g_2, g_3 should not be updated
sql("INSERT INTO SG VALUES (1, 2, 2, 2, 2, 2, 1, CAST(NULL AS INT))");
// select *
assertThat(sql("SELECT * FROM SG"))
.containsExactlyInAnyOrder(Row.of(1, 2, 2, 2, 1, 1, 1, 1));
// projection
assertThat(sql("SELECT c, d FROM SG")).containsExactlyInAnyOrder(Row.of(1, 1));
// g_1 should not be updated
sql("INSERT INTO SG VALUES (1, 3, 3, 1, 3, 3, 3, 1)");
assertThat(sql("SELECT * FROM SG"))
.containsExactlyInAnyOrder(Row.of(1, 2, 2, 2, 3, 3, 3, 1));
// d should be updated by null
sql("INSERT INTO SG VALUES (1, 3, 3, 3, 2, 2, CAST(NULL AS INT), 1)");
sql("INSERT INTO SG VALUES (1, 4, 4, 4, 2, 2, CAST(NULL AS INT), 1)");
sql("INSERT INTO SG VALUES (1, 5, 5, 3, 5, CAST(NULL AS INT), 4, 1)");
assertThat(sql("SELECT a, b FROM SG")).containsExactlyInAnyOrder(Row.of(4, 4));
assertThat(sql("SELECT c, d FROM SG")).containsExactlyInAnyOrder(Row.of(5, null));
}
    /**
     * Verifies sequence groups combined with {@code fields.default-aggregate-function =
     * 'last_non_null_value'}: with that default aggregate, an advancing sequence carrying a NULL
     * does NOT clobber the previously stored non-null value (contrast with the plain
     * sequence-group tests where NULL overwrites).
     */
    @Test
    public void testSequenceGroupWithDefaultAggFunc() {
        sql(
                "CREATE TABLE SG ("
                        + "k INT, a INT, b INT, g_1 INT, c INT, d INT, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
                        + " WITH ("
                        + "'merge-engine'='partial-update', "
                        + "'fields.g_1.sequence-group'='a,b', "
                        + "'fields.g_2.sequence-group'='c,d', "
                        + "'fields.default-aggregate-function'='last_non_null_value');");
        sql("INSERT INTO SG VALUES (1, 1, 1, 1, 1, 1, 1)");
        // g_2 should not be updated
        sql("INSERT INTO SG VALUES (1, 2, 2, 2, 2, 2, CAST(NULL AS INT))");
        // select *
        assertThat(sql("SELECT * FROM SG")).containsExactlyInAnyOrder(Row.of(1, 2, 2, 2, 1, 1, 1));
        // projection
        assertThat(sql("SELECT c, d FROM SG")).containsExactlyInAnyOrder(Row.of(1, 1));
        // g_1 should not be updated
        sql("INSERT INTO SG VALUES (1, 3, 3, 1, 3, 3, 3)");
        assertThat(sql("SELECT * FROM SG")).containsExactlyInAnyOrder(Row.of(1, 2, 2, 2, 3, 3, 3));
        // d should not be updated by null: last_non_null_value keeps the stored d=3
        sql("INSERT INTO SG VALUES (1, 3, 3, 3, 2, 2, CAST(NULL AS INT))");
        sql("INSERT INTO SG VALUES (1, 4, 4, 4, 2, 2, CAST(NULL AS INT))");
        sql("INSERT INTO SG VALUES (1, 5, 5, 3, 5, CAST(NULL AS INT), 4)");
        assertThat(sql("SELECT a, b FROM SG")).containsExactlyInAnyOrder(Row.of(4, 4));
        assertThat(sql("SELECT c, d FROM SG")).containsExactlyInAnyOrder(Row.of(5, 3));
    }
    /**
     * Verifies that invalid sequence-group DDL is rejected at table-creation time with a clear
     * root cause: (1) a sequence field missing from the schema, (2) a grouped field missing from
     * the schema, and (3/4) a field claimed by more than one sequence group (single- and
     * multi-field group variants).
     */
    @Test
    public void testInvalidSequenceGroup() {
        // sequence field g_0 does not exist in the schema
        Assertions.assertThatThrownBy(
                        () ->
                                sql(
                                        "CREATE TABLE SG ("
                                                + "k INT, a INT, b INT, g_1 INT, c INT, d INT, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
                                                + " WITH ("
                                                + "'merge-engine'='partial-update', "
                                                + "'fields.g_0.sequence-group'='a,b', "
                                                + "'fields.g_2.sequence-group'='c,d');"))
                .hasRootCauseMessage("Field g_0 can not be found in table schema.");
        // grouped field a1 does not exist in the schema
        Assertions.assertThatThrownBy(
                        () ->
                                sql(
                                        "CREATE TABLE SG ("
                                                + "k INT, a INT, b INT, g_1 INT, c INT, d INT, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
                                                + " WITH ("
                                                + "'merge-engine'='partial-update', "
                                                + "'fields.g_1.sequence-group'='a1,b', "
                                                + "'fields.g_2.sequence-group'='c,d');"))
                .hasRootCauseMessage("Field a1 can not be found in table schema.");
        // field a assigned to two different sequence groups
        Assertions.assertThatThrownBy(
                        () ->
                                sql(
                                        "CREATE TABLE SG ("
                                                + "k INT, a INT, b INT, g_1 INT, c INT, d INT, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
                                                + " WITH ("
                                                + "'merge-engine'='partial-update', "
                                                + "'fields.g_1.sequence-group'='a,b', "
                                                + "'fields.g_2.sequence-group'='a,d');"))
                .hasRootCauseMessage(
                        "Field a is defined repeatedly by multiple groups: [[g_1], [g_2]].");
        // same conflict where one of the groups is keyed by multiple fields
        Assertions.assertThatThrownBy(
                        () ->
                                sql(
                                        "CREATE TABLE SG ("
                                                + "k INT, a INT, b INT, g_1 INT, c INT, d INT, g_2 INT, g_3 INT, PRIMARY KEY (k) NOT ENFORCED)"
                                                + " WITH ("
                                                + "'merge-engine'='partial-update', "
                                                + "'fields.g_1.sequence-group'='a,b', "
                                                + "'fields.g_2,g_3.sequence-group'='a,d');"))
                .hasRootCauseMessage(
                        "Field a is defined repeatedly by multiple groups: [[g_1], [g_2, g_3]].");
    }
    /**
     * Verifies that projection push-down (reading only a subset of columns) works on a
     * partial-update table configured with the 'lookup' changelog producer and sequence groups.
     */
    @Test
    public void testProjectPushDownWithLookupChangelogProducer() {
        sql(
                "CREATE TABLE IF NOT EXISTS T_P ("
                        + "j INT, k INT, a INT, b INT, c STRING, PRIMARY KEY (j,k) NOT ENFORCED)"
                        + " WITH ('merge-engine'='partial-update', 'changelog-producer' = 'lookup', "
                        + "'fields.a.sequence-group'='j', 'fields.b.sequence-group'='c');");
        batchSql("INSERT INTO T_P VALUES (1, 1, 1, 1, '1')");
        // projected read: only k and c are requested
        assertThat(sql("SELECT k, c FROM T_P")).containsExactlyInAnyOrder(Row.of(1, "1"));
    }
    /**
     * Verifies partial-update with a local merge buffer ('local-merge-buffer-size'): multiple
     * rows for the same primary key written in a single INSERT are merged before reaching the
     * sink, and a NULL value does not overwrite a later non-null value for the same key.
     */
    @Test
    public void testLocalMerge() {
        sql(
                "CREATE TABLE T1 ("
                        + "k INT,"
                        + "v INT,"
                        + "d INT,"
                        + "PRIMARY KEY (k, d) NOT ENFORCED) PARTITIONED BY (d) "
                        + " WITH ('merge-engine'='partial-update', "
                        + "'local-merge-buffer-size'='5m'"
                        + ");");
        // two rows for key (1, 1) in the same statement are merged locally
        sql("INSERT INTO T1 VALUES (1, CAST(NULL AS INT), 1), (2, 1, 1), (1, 2, 1)");
        assertThat(batchSql("SELECT * FROM T1"))
                .containsExactlyInAnyOrder(Row.of(1, 2, 1), Row.of(2, 1, 1));
    }
    /**
     * Verifies combining a per-field aggregate function with sequence groups: field {@code a}
     * (in group g_1) uses 'sum', {@code b} has no group, {@code c} (in group g_2) has no
     * aggregate. Checks that aggregated fields accumulate only when their sequence advances and
     * that a NULL keeps the accumulator for the aggregated field but overwrites the plain one.
     */
    @Test
    public void testPartialUpdateWithAggregation() {
        sql(
                "CREATE TABLE AGG ("
                        + "k INT, a INT, b INT, g_1 INT, c VARCHAR, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
                        + " WITH ("
                        + "'merge-engine'='partial-update', "
                        + "'fields.a.aggregate-function'='sum', "
                        + "'fields.g_1.sequence-group'='a', "
                        + "'fields.g_2.sequence-group'='c');");
        // a in group g_1 with sum agg
        // b not in group
        // c in group g_2 without agg
        sql("INSERT INTO AGG VALUES (1, 1, 1, 1, '1', 1)");
        // g_2 should not be updated
        sql("INSERT INTO AGG VALUES (1, 2, 2, 2, '2', CAST(NULL AS INT))");
        // select *: a = 1 + 2 = 3 (sum), c keeps '1'
        assertThat(sql("SELECT * FROM AGG")).containsExactlyInAnyOrder(Row.of(1, 3, 2, 2, "1", 1));
        // projection
        assertThat(sql("SELECT a, c FROM AGG")).containsExactlyInAnyOrder(Row.of(3, "1"));
        // g_1 should not be updated, but sum still accumulates: a = 3 + 3 = 6
        sql("INSERT INTO AGG VALUES (1, 3, 3, 1, '3', 3)");
        assertThat(sql("SELECT * FROM AGG")).containsExactlyInAnyOrder(Row.of(1, 6, 3, 2, "3", 3));
        sql(
                "INSERT INTO AGG VALUES (1, CAST(NULL AS INT), CAST(NULL AS INT), 2, CAST(NULL AS VARCHAR), 4)");
        // a keep the last accumulator
        // b is not updated to null
        // c updated to null
        assertThat(sql("SELECT a, b, c FROM AGG")).containsExactlyInAnyOrder(Row.of(6, 3, null));
    }
    /**
     * Same scenario as {@code testPartialUpdateWithAggregation}, but the aggregated field
     * {@code a} belongs to a sequence group keyed by two fields ({@code g_1,g_3}), so its
     * sequence only advances when the combined (g_1, g_3) pair advances.
     */
    @Test
    public void testMultiFieldsSequencePartialUpdateWithAggregation() {
        sql(
                "CREATE TABLE AGG ("
                        + "k INT, a INT, b INT, g_1 INT, c VARCHAR, g_2 INT, g_3 INT, PRIMARY KEY (k) NOT ENFORCED)"
                        + " WITH ("
                        + "'merge-engine'='partial-update', "
                        + "'fields.a.aggregate-function'='sum', "
                        + "'fields.g_1,g_3.sequence-group'='a', "
                        + "'fields.g_2.sequence-group'='c');");
        // a in group g_1, g_3 with sum agg
        // b not in group
        // c in group g_2 without agg
        sql("INSERT INTO AGG VALUES (1, 1, 1, 1, '1', 1, 1)");
        // g_2 should not be updated
        sql("INSERT INTO AGG VALUES (1, 2, 2, 2, '2', CAST(NULL AS INT), 2)");
        // select *: a = 1 + 2 = 3 (sum)
        assertThat(sql("SELECT * FROM AGG"))
                .containsExactlyInAnyOrder(Row.of(1, 3, 2, 2, "1", 1, 2));
        // projection
        assertThat(sql("SELECT a, c FROM AGG")).containsExactlyInAnyOrder(Row.of(3, "1"));
        // g_1 should not be updated (g_3 regresses), but sum still accumulates: a = 6
        sql("INSERT INTO AGG VALUES (1, 3, 3, 2, '3', 3, 1)");
        assertThat(sql("SELECT * FROM AGG"))
                .containsExactlyInAnyOrder(Row.of(1, 6, 3, 2, "3", 3, 2));
        sql(
                "INSERT INTO AGG VALUES (1, CAST(NULL AS INT), CAST(NULL AS INT), 2, CAST(NULL AS VARCHAR), 4, 2)");
        // a keep the last accumulator
        // b is not updated to null
        // c updated to null
        assertThat(sql("SELECT a, b, c FROM AGG")).containsExactlyInAnyOrder(Row.of(6, 3, null));
    }
    /**
     * Verifies the interaction of a per-field aggregate ('sum' on {@code a}) with
     * {@code fields.default-aggregate-function = 'last_non_null_value'}: the explicit per-field
     * function wins for {@code a}, while {@code c} falls back to the default and therefore keeps
     * its last non-null value ("3") when a NULL arrives with an advancing sequence.
     */
    @Test
    public void testPartialUpdateWithDefaultAndFieldAggregation() {
        sql(
                "CREATE TABLE AGG ("
                        + "k INT, a INT, b INT, g_1 INT, c VARCHAR, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
                        + " WITH ("
                        + "'merge-engine'='partial-update', "
                        + "'fields.a.aggregate-function'='sum', "
                        + "'fields.g_1.sequence-group'='a', "
                        + "'fields.g_2.sequence-group'='c', "
                        + "'fields.default-aggregate-function'='last_non_null_value');");
        // a in group g_1 with sum agg
        // b not in group
        // c in group g_2 without agg
        sql("INSERT INTO AGG VALUES (1, 1, 1, 1, '1', 1)");
        // g_2 should not be updated
        sql("INSERT INTO AGG VALUES (1, 2, 2, 2, '2', CAST(NULL AS INT))");
        // select *
        assertThat(sql("SELECT * FROM AGG")).containsExactlyInAnyOrder(Row.of(1, 3, 2, 2, "1", 1));
        // projection
        assertThat(sql("SELECT a, c FROM AGG")).containsExactlyInAnyOrder(Row.of(3, "1"));
        // g_1 should not be updated
        sql("INSERT INTO AGG VALUES (1, 3, 3, 1, '3', 3)");
        assertThat(sql("SELECT * FROM AGG")).containsExactlyInAnyOrder(Row.of(1, 6, 3, 2, "3", 3));
        sql(
                "INSERT INTO AGG VALUES (1, CAST(NULL AS INT), CAST(NULL AS INT), 2, CAST(NULL AS VARCHAR), 4)");
        // a keep the last accumulator
        // b is not updated to null
        // c is updated to "3" for default agg func last_non_null_value
        assertThat(sql("SELECT a, b, c FROM AGG")).containsExactlyInAnyOrder(Row.of(6, 3, "3"));
    }
    /**
     * Verifies the 'first_value' aggregate inside a sequence group: after (a=1,seq=1) and
     * (a=2,seq=2), {@code a} stays 1 (the first value) while the sequence advances to 2.
     *
     * <p>NOTE(review): the final assert shows that a late-arriving record with an OLDER sequence
     * (a=0, seq=0) replaces the first value (a becomes 0) while the stored sequence stays 2 —
     * i.e. first_value appears to be evaluated in sequence order, not arrival order. Confirm this
     * is the documented contract rather than incidental behavior.
     */
    @Test
    public void testFirstValuePartialUpdate() {
        sql(
                "CREATE TABLE AGG ("
                        + "k INT, a INT, g_1 INT, PRIMARY KEY (k) NOT ENFORCED)"
                        + " WITH ("
                        + "'merge-engine'='partial-update', "
                        + "'fields.g_1.sequence-group'='a', "
                        + "'fields.a.aggregate-function'='first_value');");
        sql("INSERT INTO AGG VALUES (1, 1, 1), (1, 2, 2)");
        assertThat(sql("SELECT * FROM AGG")).containsExactlyInAnyOrder(Row.of(1, 1, 2));
        // old sequence
        sql("INSERT INTO AGG VALUES (1, 0, 0)");
        assertThat(sql("SELECT * FROM AGG")).containsExactlyInAnyOrder(Row.of(1, 0, 2));
    }
@Test
public void testNoSinkMaterializer() {
sEnv.getConfig()
.set(
ExecutionConfigOptions.TABLE_EXEC_SINK_UPSERT_MATERIALIZE,
ExecutionConfigOptions.UpsertMaterialize.FORCE);
sEnv.getConfig().set(RestartStrategyOptions.RESTART_STRATEGY, "none");
String sql =
"INSERT INTO dwd_orders "
+ "SELECT OrderID, OrderNumber, PersonID, CAST(NULL AS STRING), CAST(NULL AS STRING), CAST(NULL AS INT) FROM ods_orders "
+ "UNION ALL "
+ "SELECT OrderID, CAST(NULL AS INT), dim_persons.PersonID, LastName, FirstName, Age FROM dim_persons JOIN ods_orders ON dim_persons.PersonID = ods_orders.PersonID;";
try {
sEnv.executeSql(sql).await();
fail("Expecting exception");
} catch (Exception e) {
assertThat(e)
.hasRootCauseMessage(
"Sink materializer must not be used with Paimon sink. "
+ "Please set 'table.exec.sink.upsert-materialize' to 'NONE' in Flink's config.");
}
}
    /**
     * Verifies that a DELETE changelog record arriving through a streaming source retracts the
     * fields of its sequence group (a becomes NULL, g_1 advances to the delete's sequence) while
     * the row itself survives, and that this works together with projection push-down on the
     * partial-update table. The final COUNT(*) confirms the key was retracted, not removed.
     */
    @Test
    public void testPartialUpdateProjectionPushDownWithDeleteMessage() throws Exception {
        List<Row> input = Arrays.asList(Row.ofKind(RowKind.INSERT, 1, 1, 1));
        String id = TestValuesTableFactory.registerData(input);
        // create temp table in stream table env
        sEnv.executeSql(
                String.format(
                        "CREATE TEMPORARY TABLE source (k INT, a INT, g_1 INT, PRIMARY KEY (k) NOT ENFORCED) "
                                + "WITH ('connector'='values', 'bounded'='true', 'data-id'='%s', "
                                + "'changelog-mode' = 'I,D,UA,UB')",
                        id));
        sql(
                "CREATE TABLE TEST ("
                        + "k INT, a INT, b INT, g_1 INT, g_2 INT, PRIMARY KEY (k) NOT ENFORCED)"
                        + " WITH ("
                        + "'merge-engine'='partial-update', "
                        + "'fields.g_1.sequence-group'='a', "
                        + "'fields.g_2.sequence-group'='b');");
        CloseableIterator<Row> insert1 =
                streamSqlIter(
                        "INSERT INTO TEST SELECT k, a, CAST(NULL AS INT) AS b, g_1,"
                                + " CAST(NULL AS INT) as g_2 FROM source");
        sqlAssertWithRetry(
                "SELECT * FROM TEST",
                list -> list.containsExactlyInAnyOrder(Row.of(1, 1, null, 1, null)));
        // insert the delete message (DELETE for k=1 with a newer sequence g_1=2)
        input = Arrays.asList(Row.ofKind(RowKind.DELETE, 1, 1, 2));
        id = TestValuesTableFactory.registerData(input);
        // create temp table in stream table env
        sEnv.executeSql(
                String.format(
                        "CREATE TEMPORARY TABLE source2 (k INT, a INT, g_1 INT) "
                                + "WITH ('connector'='values', 'bounded'='true', 'data-id'='%s')",
                        id));
        CloseableIterator<Row> insert2 =
                streamSqlIter(
                        "INSERT INTO TEST SELECT k, a, CAST(NULL AS INT) AS b, g_1,"
                                + " CAST(NULL AS INT) as g_2 FROM source2");
        // the delete retracts a (NULL) and advances g_1 to 2; the row itself remains
        sqlAssertWithRetry(
                "SELECT * FROM TEST",
                list -> list.containsExactlyInAnyOrder(Row.of(1, null, null, 2, null)));
        assertThat(sql("SELECT COUNT(*) FROM TEST")).containsExactlyInAnyOrder(Row.of(1L));
        insert1.close();
        insert2.close();
    }
    /**
     * Verifies 'ignore-delete' = 'true' on a partial-update table (with and without the local
     * merge buffer): a DELETE changelog record from an upstream source is silently dropped, so a
     * later partial update still merges against the original row. Also checks the streaming read
     * emits the expected +I / -U / +U sequence via the lookup changelog producer.
     *
     * @param localMerge whether to additionally enable 'local-merge-buffer-size'
     */
    @ParameterizedTest(name = "localMergeEnabled = {0}")
    @ValueSource(booleans = {true, false})
    public void testIgnoreDelete(boolean localMerge) throws Exception {
        sql(
                "CREATE TABLE ignore_delete (pk INT PRIMARY KEY NOT ENFORCED, a STRING, b STRING) WITH ("
                        + " 'merge-engine' = 'partial-update',"
                        + " 'ignore-delete' = 'true',"
                        + " 'changelog-producer' = 'lookup'"
                        + ")");
        if (localMerge) {
            sql("ALTER TABLE ignore_delete SET ('local-merge-buffer-size' = '5m')");
        }
        sql("INSERT INTO ignore_delete VALUES (1, CAST (NULL AS STRING), 'apple')");
        String id =
                TestValuesTableFactory.registerData(
                        Collections.singletonList(Row.ofKind(RowKind.DELETE, 1, null, "apple")));
        streamSqlIter(
                        "CREATE TEMPORARY TABLE input (pk INT PRIMARY KEY NOT ENFORCED, a STRING, b STRING) "
                                + "WITH ('connector'='values', 'bounded'='true', 'data-id'='%s', "
                                + "'changelog-mode' = 'I,D')",
                        id)
                .close();
        // the DELETE coming from 'input' is ignored, so (1, null, 'apple') is still present
        sEnv.executeSql("INSERT INTO ignore_delete SELECT * FROM input").await();
        sql("INSERT INTO ignore_delete VALUES (1, 'A', CAST (NULL AS STRING))");
        // batch read
        assertThat(sql("SELECT * FROM ignore_delete"))
                .containsExactlyInAnyOrder(Row.of(1, "A", "apple"));
        // streaming read results has -U
        BlockingIterator<Row, Row> iterator =
                streamSqlBlockIter(
                        "SELECT * FROM ignore_delete /*+ OPTIONS('scan.timestamp-millis' = '0') */");
        assertThat(iterator.collect(3))
                .containsExactly(
                        Row.ofKind(RowKind.INSERT, 1, null, "apple"),
                        Row.ofKind(RowKind.UPDATE_BEFORE, 1, null, "apple"),
                        Row.ofKind(RowKind.UPDATE_AFTER, 1, "A", "apple"));
        iterator.close();
    }
    /**
     * Verifies 'partial-update.remove-record-on-delete' = 'true' without sequence groups: a
     * DELETE (both a batch DELETE statement and a DELETE changelog record from a streaming
     * source) removes the whole row instead of being rejected, and subsequent inserts rebuild it
     * from scratch.
     */
    @Test
    public void testRemoveRecordOnDeleteWithoutSequenceGroup() throws Exception {
        sql(
                "CREATE TABLE remove_record_on_delete (pk INT PRIMARY KEY NOT ENFORCED, a STRING, b STRING) WITH ("
                        + " 'merge-engine' = 'partial-update',"
                        + " 'partial-update.remove-record-on-delete' = 'true'"
                        + ")");
        sql("INSERT INTO remove_record_on_delete VALUES (1, CAST (NULL AS STRING), 'apple')");
        // delete record
        sql("DELETE FROM remove_record_on_delete WHERE pk = 1");
        // batch read
        assertThat(sql("SELECT * FROM remove_record_on_delete")).isEmpty();
        // insert records
        sql("INSERT INTO remove_record_on_delete VALUES (1, CAST (NULL AS STRING), 'apache')");
        sql("INSERT INTO remove_record_on_delete VALUES (1, 'A', CAST (NULL AS STRING))");
        // batch read
        assertThat(sql("SELECT * FROM remove_record_on_delete"))
                .containsExactlyInAnyOrder(Row.of(1, "A", "apache"));
        // delete record with changelog stream
        String id =
                TestValuesTableFactory.registerData(
                        Collections.singletonList(Row.ofKind(RowKind.DELETE, 1, "A", null)));
        sEnv.executeSql(
                String.format(
                        "CREATE TEMPORARY TABLE delete_source1 (pk INT, a STRING, b STRING) "
                                + "WITH ('connector'='values', 'bounded'='true', 'data-id'='%s', "
                                + "'changelog-mode' = 'I,D,UA,UB')",
                        id));
        sEnv.executeSql("INSERT INTO remove_record_on_delete SELECT * FROM delete_source1").await();
        assertThat(sql("SELECT * FROM remove_record_on_delete")).isEmpty();
    }
    /**
     * Verifies 'partial-update.remove-record-on-sequence-group' = 'seq_a': a DELETE whose
     * sequence group is {@code seq_b} only retracts the fields of that group (b becomes NULL),
     * while a DELETE carrying the designated group {@code seq_a} removes the whole record. A
     * batch DELETE statement removes the record as well.
     */
    @Test
    public void testRemoveRecordOnDeleteWithSequenceGroup() throws Exception {
        sql(
                "CREATE TABLE remove_record_on_delete_sequence_group"
                        + " (pk INT PRIMARY KEY NOT ENFORCED, a STRING, seq_a INT, b STRING, seq_b INT) WITH ("
                        + " 'merge-engine' = 'partial-update',"
                        + " 'fields.seq_a.sequence-group' = 'a',"
                        + " 'fields.seq_b.sequence-group' = 'b',"
                        + " 'partial-update.remove-record-on-sequence-group' = 'seq_a'"
                        + ")");
        sql("INSERT INTO remove_record_on_delete_sequence_group VALUES (1, 'apple', 2, 'a', 1)");
        sql("INSERT INTO remove_record_on_delete_sequence_group VALUES (1, 'banana', 1, 'b', 2)");
        // a keeps 'apple' (seq_a=2 > 1); b updates to 'b' (seq_b=2 > 1)
        assertThat(sql("SELECT * FROM remove_record_on_delete_sequence_group"))
                .containsExactlyInAnyOrder(Row.of(1, "apple", 2, "b", 2));
        // delete with seq_b won't delete record but retract b
        String id =
                TestValuesTableFactory.registerData(
                        Collections.singletonList(
                                Row.ofKind(RowKind.DELETE, 1, null, null, "b", 2)));
        sEnv.executeSql(
                String.format(
                        "CREATE TEMPORARY TABLE delete_source1 (pk INT, a STRING, seq_a INT, b STRING, seq_b INT) "
                                + "WITH ('connector'='values', 'bounded'='true', 'data-id'='%s', "
                                + "'changelog-mode' = 'I,D,UA,UB')",
                        id));
        sEnv.executeSql(
                        "INSERT INTO remove_record_on_delete_sequence_group SELECT * FROM delete_source1")
                .await();
        assertThat(sql("SELECT * FROM remove_record_on_delete_sequence_group"))
                .containsExactlyInAnyOrder(Row.of(1, "apple", 2, null, 2));
        // delete record with seq_a: the designated sequence group removes the whole row
        String id2 =
                TestValuesTableFactory.registerData(
                        Collections.singletonList(
                                Row.ofKind(RowKind.DELETE, 1, "apple", 2, null, null)));
        sEnv.executeSql(
                String.format(
                        "CREATE TEMPORARY TABLE delete_source2 (pk INT, a STRING, seq_a INT, b STRING, seq_b INT) "
                                + "WITH ('connector'='values', 'bounded'='true', 'data-id'='%s', "
                                + "'changelog-mode' = 'I,D,UA,UB')",
                        id2));
        sEnv.executeSql(
                        "INSERT INTO remove_record_on_delete_sequence_group SELECT * FROM delete_source2")
                .await();
        assertThat(sql("SELECT * FROM remove_record_on_delete_sequence_group")).isEmpty();
        // batch delete record
        sql(
                "INSERT INTO remove_record_on_delete_sequence_group VALUES (2, 'flink', 2, 'paimon', 1)");
        sql("DELETE FROM remove_record_on_delete_sequence_group WHERE pk = 2");
        assertThat(sql("SELECT * FROM remove_record_on_delete_sequence_group")).isEmpty();
    }
    /**
     * Same as {@code testRemoveRecordOnDeleteWithoutSequenceGroup} but with the 'lookup'
     * changelog producer: additionally verifies the streaming changelog contains the full
     * +I / -D / +I / -U / +U history of the delete-and-rebuild.
     */
    @Test
    public void testRemoveRecordOnDeleteLookup() throws Exception {
        sql(
                "CREATE TABLE remove_record_on_delete (pk INT PRIMARY KEY NOT ENFORCED, a STRING, b STRING) WITH ("
                        + " 'merge-engine' = 'partial-update',"
                        + " 'partial-update.remove-record-on-delete' = 'true',"
                        + " 'changelog-producer' = 'lookup'"
                        + ")");
        sql("INSERT INTO remove_record_on_delete VALUES (1, CAST (NULL AS STRING), 'apple')");
        // delete record
        sql("DELETE FROM remove_record_on_delete WHERE pk = 1");
        // batch read
        assertThat(sql("SELECT * FROM remove_record_on_delete")).isEmpty();
        // insert records
        sql("INSERT INTO remove_record_on_delete VALUES (1, CAST (NULL AS STRING), 'apache')");
        sql("INSERT INTO remove_record_on_delete VALUES (1, 'A', CAST (NULL AS STRING))");
        // batch read
        assertThat(sql("SELECT * FROM remove_record_on_delete"))
                .containsExactlyInAnyOrder(Row.of(1, "A", "apache"));
        // streaming read results has -U
        BlockingIterator<Row, Row> iterator =
                streamSqlBlockIter(
                        "SELECT * FROM remove_record_on_delete /*+ OPTIONS('scan.timestamp-millis' = '0') */");
        assertThat(iterator.collect(5))
                .containsExactly(
                        Row.ofKind(RowKind.INSERT, 1, null, "apple"),
                        Row.ofKind(RowKind.DELETE, 1, null, "apple"),
                        Row.ofKind(RowKind.INSERT, 1, null, "apache"),
                        Row.ofKind(RowKind.UPDATE_BEFORE, 1, null, "apache"),
                        Row.ofKind(RowKind.UPDATE_AFTER, 1, "A", "apache"));
        iterator.close();
    }
    /**
     * Verifies a sequence group combined with the default aggregate 'sum': both inserts advance
     * {@code seq}, so {@code v} accumulates 1 + 2 = 3 while {@code seq} tracks the latest value.
     */
    @Test
    public void testSequenceGroupWithDefaultAgg() {
        sql(
                "CREATE TABLE seq_default_agg ("
                        + " pk INT PRIMARY KEY NOT ENFORCED,"
                        + " seq INT,"
                        + " v INT) WITH ("
                        + " 'merge-engine'='partial-update',"
                        + " 'fields.seq.sequence-group'='v',"
                        + " 'fields.default-aggregate-function'='sum'"
                        + ")");
        sql("INSERT INTO seq_default_agg VALUES (0, 1, 1)");
        sql("INSERT INTO seq_default_agg VALUES (0, 2, 2)");
        assertThat(sql("SELECT * FROM seq_default_agg")).containsExactly(Row.of(0, 2, 3));
    }
}
|
googleapis/google-cloud-java | 36,061 | java-securitycentermanagement/proto-google-cloud-securitycentermanagement-v1/src/main/java/com/google/cloud/securitycentermanagement/v1/ListEffectiveEventThreatDetectionCustomModulesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycentermanagement/v1/security_center_management.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycentermanagement.v1;
/**
*
*
* <pre>
* Request message for
* [SecurityCenterManagement.ListEffectiveEventThreatDetectionCustomModules][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListEffectiveEventThreatDetectionCustomModules].
* </pre>
*
* Protobuf type {@code
* google.cloud.securitycentermanagement.v1.ListEffectiveEventThreatDetectionCustomModulesRequest}
*/
public final class ListEffectiveEventThreatDetectionCustomModulesRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycentermanagement.v1.ListEffectiveEventThreatDetectionCustomModulesRequest)
ListEffectiveEventThreatDetectionCustomModulesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEffectiveEventThreatDetectionCustomModulesRequest.newBuilder() to construct.
private ListEffectiveEventThreatDetectionCustomModulesRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEffectiveEventThreatDetectionCustomModulesRequest() {
parent_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEffectiveEventThreatDetectionCustomModulesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
.internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveEventThreatDetectionCustomModulesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
.internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveEventThreatDetectionCustomModulesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest.class,
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of parent to list effective custom modules, in one of the
* following formats:
*
* * `organizations/{organization}/locations/{location}`
* * `folders/{folder}/locations/{location}`
* * `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field is still held as the ByteString it was parsed from; decode once and
      // cache the String back into parent_ so later calls skip the UTF-8 decode.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. Name of parent to list effective custom modules, in one of the
* following formats:
*
* * `organizations/{organization}/locations/{location}`
* * `folders/{folder}/locations/{location}`
* * `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of results to return in a single response.
* Default is 10, minimum is 1, maximum is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous request. Provide this
* token to retrieve the next page of results.
*
* When paginating, the rest of the request must match the request that
* generated the page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous request. Provide this
* token to retrieve the next page of results.
*
* When paginating, the rest of the request must match the request that
* generated the page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields at their default value (empty string, 0) are skipped.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    // Preserve any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize == -1 means "not computed yet"; the message is immutable so the
    // size can be cached after the first computation.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all three fields plus unknown fields, per the protobuf
  // generated-code contract (consistent with hashCode below).
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.securitycentermanagement.v1
            .ListEffectiveEventThreatDetectionCustomModulesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycentermanagement.v1
            .ListEffectiveEventThreatDetectionCustomModulesRequest
        other =
            (com.google.cloud.securitycentermanagement.v1
                    .ListEffectiveEventThreatDetectionCustomModulesRequest)
                obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not computed" sentinel; a genuinely-zero hash is simply recomputed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [SecurityCenterManagement.ListEffectiveEventThreatDetectionCustomModules][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListEffectiveEventThreatDetectionCustomModules].
* </pre>
*
* Protobuf type {@code
* google.cloud.securitycentermanagement.v1.ListEffectiveEventThreatDetectionCustomModulesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycentermanagement.v1.ListEffectiveEventThreatDetectionCustomModulesRequest)
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
.internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveEventThreatDetectionCustomModulesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
.internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveEventThreatDetectionCustomModulesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest.class,
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest.Builder.class);
}
// Construct using
// com.google.cloud.securitycentermanagement.v1.ListEffectiveEventThreatDetectionCustomModulesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
.internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveEventThreatDetectionCustomModulesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
getDefaultInstanceForType() {
return com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
build() {
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.securitycentermanagement.v1
            .ListEffectiveEventThreatDetectionCustomModulesRequest
        buildPartial() {
      com.google.cloud.securitycentermanagement.v1
              .ListEffectiveEventThreatDetectionCustomModulesRequest
          result =
              new com.google.cloud.securitycentermanagement.v1
                  .ListEffectiveEventThreatDetectionCustomModulesRequest(this);
      // Only copy fields over when at least one presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each builder field into result when its bitField0_ presence
    // bit (0x1 parent, 0x2 pageSize, 0x4 pageToken) is set.
    private void buildPartial0(
        com.google.cloud.securitycentermanagement.v1
                .ListEffectiveEventThreatDetectionCustomModulesRequest
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest) {
return mergeFrom(
(com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest)
other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
other) {
if (other
== com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of parent to list effective custom modules, in one of the
* following formats:
*
* * `organizations/{organization}/locations/{location}`
* * `folders/{folder}/locations/{location}`
* * `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of parent to list effective custom modules, in one of the
* following formats:
*
* * `organizations/{organization}/locations/{location}`
* * `folders/{folder}/locations/{location}`
* * `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of parent to list effective custom modules, in one of the
* following formats:
*
* * `organizations/{organization}/locations/{location}`
* * `folders/{folder}/locations/{location}`
* * `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of parent to list effective custom modules, in one of the
* following formats:
*
* * `organizations/{organization}/locations/{location}`
* * `folders/{folder}/locations/{location}`
* * `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of parent to list effective custom modules, in one of the
* following formats:
*
* * `organizations/{organization}/locations/{location}`
* * `folders/{folder}/locations/{location}`
* * `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. The maximum number of results to return in a single response.
* Default is 10, minimum is 1, maximum is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. The maximum number of results to return in a single response.
* Default is 10, minimum is 1, maximum is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The maximum number of results to return in a single response.
* Default is 10, minimum is 1, maximum is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous request. Provide this
* token to retrieve the next page of results.
*
* When paginating, the rest of the request must match the request that
* generated the page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous request. Provide this
* token to retrieve the next page of results.
*
* When paginating, the rest of the request must match the request that
* generated the page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous request. Provide this
* token to retrieve the next page of results.
*
* When paginating, the rest of the request must match the request that
* generated the page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous request. Provide this
* token to retrieve the next page of results.
*
* When paginating, the rest of the request must match the request that
* generated the page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A pagination token returned from a previous request. Provide this
* token to retrieve the next page of results.
*
* When paginating, the rest of the request must match the request that
* generated the page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securitycentermanagement.v1.ListEffectiveEventThreatDetectionCustomModulesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycentermanagement.v1.ListEffectiveEventThreatDetectionCustomModulesRequest)
  // Singleton default instance shared by newBuilder()/getDefaultInstance().
  private static final com.google.cloud.securitycentermanagement.v1
          .ListEffectiveEventThreatDetectionCustomModulesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.securitycentermanagement.v1
            .ListEffectiveEventThreatDetectionCustomModulesRequest();
  }

  /** Returns the shared immutable default (all-fields-unset) instance. */
  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveEventThreatDetectionCustomModulesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and surfaces partial
  // messages on failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<
          ListEffectiveEventThreatDetectionCustomModulesRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<
              ListEffectiveEventThreatDetectionCustomModulesRequest>() {
            @java.lang.Override
            public ListEffectiveEventThreatDetectionCustomModulesRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };

  /** Returns the singleton parser for this message type. */
  public static com.google.protobuf.Parser<ListEffectiveEventThreatDetectionCustomModulesRequest>
      parser() {
    return PARSER;
  }
@java.lang.Override
public com.google.protobuf.Parser<ListEffectiveEventThreatDetectionCustomModulesRequest>
getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.securitycentermanagement.v1
.ListEffectiveEventThreatDetectionCustomModulesRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== apache/hadoop: hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java ====
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3a.s3guard;
import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Scanner;
import java.util.concurrent.TimeUnit;
import software.amazon.awssdk.services.s3.model.MultipartUpload;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.WriteOperationHelper;
import org.apache.hadoop.fs.s3a.auth.RolePolicies;
import org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens;
import org.apache.hadoop.fs.s3a.commit.CommitConstants;
import org.apache.hadoop.fs.s3a.commit.InternalCommitterConstants;
import org.apache.hadoop.fs.s3a.select.SelectConstants;
import org.apache.hadoop.fs.s3a.tools.BucketTool;
import org.apache.hadoop.fs.s3a.tools.MarkerTool;
import org.apache.hadoop.fs.shell.CommandFormat;
import org.apache.hadoop.fs.statistics.IOStatistics;
import org.apache.hadoop.fs.store.audit.AuditSpan;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ExitCodeProvider;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import static org.apache.hadoop.fs.s3a.Constants.*;
import static org.apache.hadoop.fs.s3a.Invoker.LOG_EVENT;
import static org.apache.hadoop.fs.s3a.commit.CommitConstants.*;
import static org.apache.hadoop.fs.s3a.commit.staging.StagingCommitterConstants.FILESYSTEM_TEMP_PATH;
import static org.apache.hadoop.fs.s3a.impl.InternalConstants.S3A_DYNAMIC_CAPABILITIES;
import static org.apache.hadoop.fs.s3a.impl.streams.StreamIntegration.DEFAULT_STREAM_TYPE;
import static org.apache.hadoop.fs.s3a.select.SelectConstants.SELECT_UNSUPPORTED;
import static org.apache.hadoop.fs.statistics.IOStatisticsLogging.ioStatisticsToPrettyString;
import static org.apache.hadoop.fs.statistics.IOStatisticsSupport.retrieveIOStatistics;
import static org.apache.hadoop.fs.statistics.StoreStatisticNames.MULTIPART_UPLOAD_ABORTED;
import static org.apache.hadoop.service.launcher.LauncherExitCodes.*;
/**
* CLI to manage S3Guard Metadata Store.
* <p>
* Some management tools invoke this class directly.
*/
@InterfaceAudience.LimitedPrivate("management tools")
@InterfaceStability.Evolving
public abstract class S3GuardTool extends Configured implements Tool,
Closeable {
private static final Logger LOG = LoggerFactory.getLogger(S3GuardTool.class);
private static final String ENTRY_POINT = "s3guard";
private static final String NAME = ENTRY_POINT;
private static final String COMMON_USAGE =
"When possible and not overridden by more specific options, metadata\n" +
"repository information will be inferred from the S3A URL (if provided)" +
"\n\n" +
"Generic options supported are:\n" +
" -conf <config file> - specify an application configuration file\n" +
" -D <property=value> - define a value for a given property\n";
static final List<String> UNSUPPORTED_COMMANDS = Arrays.asList(
"init",
"destroy",
"authoritative",
"diff",
"fsck",
"import",
"prune",
"set-capacity");
/**
* Usage includes supported commands, but not the excluded ones.
*/
private static final String USAGE = ENTRY_POINT +
" [command] [OPTIONS] [s3a://BUCKET]\n\n" +
"Commands: \n" +
"\t" + BucketInfo.NAME + " - " + BucketInfo.PURPOSE + "\n" +
"\t" + BucketTool.NAME + " - " + BucketTool.PURPOSE + "\n" +
"\t" + MarkerTool.MARKERS + " - " + MarkerTool.PURPOSE + "\n" +
"\t" + Uploads.NAME + " - " + Uploads.PURPOSE + "\n";
private static final String E_UNSUPPORTED = "This command is no longer supported";
public abstract String getUsage();
// Exit codes
static final int SUCCESS = EXIT_SUCCESS;
static final int INVALID_ARGUMENT = EXIT_COMMAND_ARGUMENT_ERROR;
static final int E_USAGE = EXIT_USAGE;
static final int ERROR = EXIT_FAIL;
static final int E_BAD_STATE = EXIT_NOT_ACCEPTABLE;
static final int E_NOT_FOUND = EXIT_NOT_FOUND;
static final int E_S3GUARD_UNSUPPORTED = ERROR;
/** Error String when the wrong FS is used for binding: {@value}. **/
@VisibleForTesting
public static final String WRONG_FILESYSTEM = "Wrong filesystem for ";
/**
* The FS we close when we are closed.
*/
private FileSystem baseFS;
private S3AFileSystem filesystem;
private final CommandFormat commandFormat;
public static final String META_FLAG = "meta";
// These are common options
public static final String DAYS_FLAG = "days";
public static final String HOURS_FLAG = "hours";
public static final String MINUTES_FLAG = "minutes";
public static final String SECONDS_FLAG = "seconds";
public static final String AGE_OPTIONS_USAGE = "[-days <days>] "
+ "[-hours <hours>] [-minutes <minutes>] [-seconds <seconds>]";
public static final String VERBOSE = "verbose";
  /**
   * Construct a S3Guard tool with the given Hadoop configuration.
   * Accepts any number of CLI arguments.
   * @param conf Configuration.
   * @param opts any boolean options to support
   */
  protected S3GuardTool(Configuration conf, String... opts) {
    this(conf, 0, Integer.MAX_VALUE, opts);
  }

  /**
   * Construct a S3Guard tool with the given Hadoop configuration and
   * bounds on the number of CLI arguments accepted.
   * @param conf Configuration.
   * @param min min number of args
   * @param max max number of args
   * @param opts any boolean options to support
   */
  protected S3GuardTool(Configuration conf, int min, int max, String... opts) {
    super(conf);
    commandFormat = new CommandFormat(min, max, opts);
  }

  /**
   * Return sub-command name.
   * @return sub-command name.
   */
  public abstract String getName();
/**
* Close the FS.
* @throws IOException on failure.
*/
@Override
public void close() throws IOException {
IOUtils.cleanupWithLogger(LOG,
baseFS);
baseFS = null;
filesystem = null;
}
private long getDeltaComponent(TimeUnit unit, String arg) {
String raw = getCommandFormat().getOptValue(arg);
if (raw == null || raw.isEmpty()) {
return 0;
}
Long parsed = Long.parseLong(raw);
return unit.toMillis(parsed);
}
/**
* Convert all age options supplied to total milliseconds of time.
* @return Sum of all age options, or zero if none were given.
*/
long ageOptionsToMsec() {
long cliDelta = 0;
cliDelta += getDeltaComponent(TimeUnit.DAYS, DAYS_FLAG);
cliDelta += getDeltaComponent(TimeUnit.HOURS, HOURS_FLAG);
cliDelta += getDeltaComponent(TimeUnit.MINUTES, MINUTES_FLAG);
cliDelta += getDeltaComponent(TimeUnit.SECONDS, SECONDS_FLAG);
return cliDelta;
}
protected final void addAgeOptions() {
CommandFormat format = getCommandFormat();
format.addOptionWithValue(DAYS_FLAG);
format.addOptionWithValue(HOURS_FLAG);
format.addOptionWithValue(MINUTES_FLAG);
format.addOptionWithValue(SECONDS_FLAG);
}
/**
* Create and initialize a new S3A FileSystem instance.
*
* @param path s3a URI
* @throws IOException failure to init filesystem
* @throws ExitUtil.ExitException if the FS is not an S3A FS
*/
protected void initS3AFileSystem(String path) throws IOException {
LOG.debug("Initializing S3A FS to {}", path);
URI uri = toUri(path);
bindFilesystem(FileSystem.newInstance(uri, getConf()));
}
/**
* Parse CLI arguments and returns the position arguments.
* The options are stored in {@link #commandFormat}.
*
* @param args command line arguments.
* @return the position arguments from CLI.
*/
protected List<String> parseArgs(String[] args) {
return getCommandFormat().parse(args, 1);
}
/**
* Process the arguments.
* @param args raw args
* @return process arg list.
* @throws ExitUtil.ExitException if there's an unknown option.
*/
protected List<String> parseArgsWithErrorReporting(final String[] args)
throws ExitUtil.ExitException {
try {
return parseArgs(args);
} catch (CommandFormat.UnknownOptionException e) {
errorln(getUsage());
throw new ExitUtil.ExitException(EXIT_USAGE, e.getMessage(), e);
}
}
  /**
   * Get the S3A filesystem bound via {@link #bindFilesystem(FileSystem)},
   * or null if none has been bound yet.
   * @return the bound filesystem, possibly null.
   */
  protected S3AFileSystem getFilesystem() {
    return filesystem;
  }
/**
* Sets the filesystem; it must be an S3A FS instance, or a FilterFS
* around an S3A Filesystem.
* @param bindingFS filesystem to bind to
* @return the bound FS.
* @throws ExitUtil.ExitException if the FS is not an S3 FS
*/
protected S3AFileSystem bindFilesystem(FileSystem bindingFS) {
FileSystem fs = bindingFS;
baseFS = bindingFS;
while (fs instanceof FilterFileSystem) {
fs = ((FilterFileSystem) fs).getRawFileSystem();
}
if (!(fs instanceof S3AFileSystem)) {
throw new ExitUtil.ExitException(EXIT_SERVICE_UNAVAILABLE,
WRONG_FILESYSTEM + "URI " + fs.getUri() + " : "
+ fs.getClass().getName());
}
filesystem = (S3AFileSystem) fs;
return filesystem;
}
  /**
   * Reset the store and filesystem bindings.
   */
  protected void resetBindings() {
    // NOTE(review): only the unwrapped S3A reference is cleared here;
    // baseFS is left for close() to release -- confirm the asymmetry
    // is intentional.
    filesystem = null;
  }

  /**
   * Get the command format used for CLI option parsing.
   * @return the command format created in the constructor.
   */
  protected final CommandFormat getCommandFormat() {
    return commandFormat;
  }

  /**
   * Tool entry point: delegates to {@link #run(String[], PrintStream)}
   * with {@code System.out} as the output stream.
   * @param args argument list
   * @return the exit code
   * @throws Exception on any failure
   */
  @Override
  public final int run(String[] args) throws Exception {
    return run(args, System.out);
  }
/**
* Run the tool, capturing the output (if the tool supports that).
*
* As well as returning an exit code, the implementations can choose to
* throw an instance of {@code ExitUtil.ExitException} with their exit
* code set to the desired exit value. The exit code of such an exception
* is used for the tool's exit code, and the stack trace only logged at
* debug.
* @param args argument list
* @param out output stream
* @return the exit code to return.
* @throws Exception on any failure
*/
public abstract int run(String[] args, PrintStream out) throws Exception,
ExitUtil.ExitException;
/**
* Dump the filesystem Storage Statistics if the FS is not null.
* Only non-zero statistics are printed.
* @param stream output stream
*/
protected void dumpFileSystemStatistics(PrintStream stream) {
FileSystem fs = getFilesystem();
if (fs == null) {
return;
}
println(stream, "%nIO Statistics for %s%n", fs.getUri());
final IOStatistics iostats = retrieveIOStatistics(fs);
if (iostats != null) {
println(stream, ioStatisticsToPrettyString(iostats));
} else {
println(stream, "FileSystem does not provide IOStatistics");
}
println(stream, "");
}
/**
* Get info about a bucket and its S3Guard integration status.
*/
public static class BucketInfo extends S3GuardTool {
public static final String BUCKET_INFO = "bucket-info";
public static final String NAME = BUCKET_INFO;
public static final String GUARDED_FLAG = "guarded";
public static final String UNGUARDED_FLAG = "unguarded";
public static final String ENCRYPTION_FLAG = "encryption";
public static final String MAGIC_FLAG = "magic";
public static final String MARKERS_FLAG = "markers";
public static final String MARKERS_AWARE = "aware";
public static final String FIPS_FLAG = "fips";
public static final String PURPOSE = "provide/check information"
+ " about a specific bucket";
private static final String USAGE = NAME + " [OPTIONS] s3a://BUCKET\n"
+ "\t" + PURPOSE + "\n\n"
+ "Common options:\n"
+ " -" + FIPS_FLAG + " - Require the client is using a FIPS endpoint\n"
+ " -" + MAGIC_FLAG +
" - Require the S3 filesystem to be support the \"magic\" committer\n"
+ " -" + ENCRYPTION_FLAG
+ " (none, sse-s3, sse-kms) - Require encryption policy\n"
+ " -" + MARKERS_FLAG
+ " (aware, keep, delete, authoritative) - directory markers policy\n"
+ " -" + GUARDED_FLAG + " - Require S3Guard. Will always fail.\n"
+ " -" + UNGUARDED_FLAG + " - Force S3Guard to be disabled (always true)\n";
/**
* Output when the client cannot get the location of a bucket.
*/
@VisibleForTesting
public static final String LOCATION_UNKNOWN =
"Location unknown -caller lacks "
+ RolePolicies.S3_GET_BUCKET_LOCATION + " permission";
@VisibleForTesting
public static final String IS_MARKER_AWARE =
"\tThe S3A connector does not delete markers";
public static final String CAPABILITY_FORMAT = "\t%s %s%n";
    /**
     * Create the bucket-info command: registers the boolean flags
     * and the two valued options (-encryption, -markers).
     * @param conf configuration to use.
     */
    public BucketInfo(Configuration conf) {
      super(conf, GUARDED_FLAG, UNGUARDED_FLAG, FIPS_FLAG, MAGIC_FLAG);
      CommandFormat format = getCommandFormat();
      format.addOptionWithValue(ENCRYPTION_FLAG);
      format.addOptionWithValue(MARKERS_FLAG);
    }

    /** @return the sub-command name {@code bucket-info}. */
    @Override
    public String getName() {
      return NAME;
    }

    /** @return the usage string for this command. */
    @Override
    public String getUsage() {
      return USAGE;
    }
public int run(String[] args, PrintStream out)
throws InterruptedException, IOException {
List<String> paths = parseArgs(args);
if (paths.isEmpty()) {
errorln(getUsage());
throw invalidArgs("No bucket specified");
}
String s3Path = paths.get(0);
CommandFormat commands = getCommandFormat();
URI fsURI = toUri(s3Path);
S3AFileSystem fs = bindFilesystem(
FileSystem.newInstance(fsURI, getConf()));
Configuration conf = fs.getConf();
URI fsUri = fs.getUri();
println(out, "Filesystem %s", fsUri);
final Path root = new Path("/");
try {
println(out, "Location: %s", fs.getBucketLocation());
} catch (IOException e) {
// Caller cannot get the location of this bucket due to permissions
// in their role or the bucket itself, or it is not an operation
// supported by this store.
// Note and continue.
LOG.debug("failed to get bucket location", e);
println(out, LOCATION_UNKNOWN);
// it may be the bucket is not found; we can't differentiate
// that and handle third party store issues where the API may
// not work.
// Fallback to looking for bucket root attributes.
println(out, "Probing for bucket existence");
fs.listXAttrs(new Path("/"));
}
println(out, "%nS3A Client");
printOption(out, "\tSigning Algorithm", SIGNING_ALGORITHM, "(unset)");
String endpoint = conf.getTrimmed(ENDPOINT, "");
println(out, "\tEndpoint: %s=%s",
ENDPOINT,
StringUtils.isNotEmpty(endpoint) ? endpoint : "(unset)");
String region = conf.getTrimmed(AWS_REGION, "");
println(out, "\tRegion: %s=%s", AWS_REGION,
StringUtils.isNotEmpty(region) ? region : "(unset)");
String encryption =
printOption(out, "\tEncryption", Constants.S3_ENCRYPTION_ALGORITHM,
"none");
// stream input
printOption(out, "\tInput stream type", INPUT_STREAM_TYPE,
DEFAULT_STREAM_TYPE.getName());
printOption(out, "\tInput seek policy", INPUT_STREAM_TYPE,
Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_DEFAULT);
printOption(out, "\tChange Detection Source", CHANGE_DETECT_SOURCE,
CHANGE_DETECT_SOURCE_DEFAULT);
printOption(out, "\tChange Detection Mode", CHANGE_DETECT_MODE,
CHANGE_DETECT_MODE_DEFAULT);
// committers
println(out, "%nS3A Committers");
boolean magic = fs.hasPathCapability(
new Path(s3Path),
CommitConstants.STORE_CAPABILITY_MAGIC_COMMITTER);
println(out, "\tThe \"magic\" committer %s supported in the filesystem",
magic ? "is" : "is not");
printOption(out, "\tS3A Committer factory class",
S3A_COMMITTER_FACTORY_KEY, "");
String committer = conf.getTrimmed(FS_S3A_COMMITTER_NAME,
COMMITTER_NAME_FILE);
printOption(out, "\tS3A Committer name",
FS_S3A_COMMITTER_NAME, COMMITTER_NAME_FILE);
switch (committer) {
case COMMITTER_NAME_FILE:
println(out, "The original 'file' committer is active"
+ " -this is slow and potentially unsafe");
break;
case InternalCommitterConstants.COMMITTER_NAME_STAGING:
println(out, "The 'staging' committer is used "
+ "-prefer the 'magic' committer");
// fall through
case COMMITTER_NAME_DIRECTORY:
// fall through
case COMMITTER_NAME_PARTITIONED:
// print all the staging options.
printOption(out, "\tCluster filesystem staging directory",
FS_S3A_COMMITTER_STAGING_TMP_PATH, FILESYSTEM_TEMP_PATH);
printOption(out, "\tLocal filesystem buffer directory",
BUFFER_DIR, "");
printOption(out, "\tFile conflict resolution",
FS_S3A_COMMITTER_STAGING_CONFLICT_MODE, DEFAULT_CONFLICT_MODE);
break;
case COMMITTER_NAME_MAGIC:
printOption(out, "\tStore magic committer integration",
MAGIC_COMMITTER_ENABLED,
Boolean.toString(DEFAULT_MAGIC_COMMITTER_ENABLED));
if (!magic) {
println(out, "Warning: although the magic committer is enabled, "
+ "the store does not support it");
}
break;
default:
println(out, "\tWarning: committer '%s' is unknown", committer);
}
// look at delegation token support
println(out, "%nSecurity");
if (fs.getDelegationTokens().isPresent()) {
// DT is enabled
S3ADelegationTokens dtIntegration = fs.getDelegationTokens().get();
println(out, "\tDelegation Support enabled: token kind = %s",
dtIntegration.getTokenKind());
UserGroupInformation.AuthenticationMethod authenticationMethod
= UserGroupInformation.getCurrentUser().getAuthenticationMethod();
println(out, "\tHadoop security mode: %s", authenticationMethod);
if (UserGroupInformation.isSecurityEnabled()) {
println(out,
"\tWarning: security is disabled; tokens will not be collected");
}
} else {
println(out, "\tDelegation token support is disabled");
}
if (commands.getOpt(GUARDED_FLAG)) {
throw badState("S3Guard is not supported");
}
if (commands.getOpt(MAGIC_FLAG) && !magic) {
throw badState("The magic committer is not enabled for %s", fsUri);
}
String desiredEncryption = getCommandFormat()
.getOptValue(ENCRYPTION_FLAG);
if (StringUtils.isNotEmpty(desiredEncryption)
&& !desiredEncryption.equalsIgnoreCase(encryption)) {
throw badState("Bucket %s: required encryption is %s"
+ " but actual encryption is %s",
fsUri, desiredEncryption, encryption);
}
// directory markers
processMarkerOption(out,
getCommandFormat().getOptValue(MARKERS_FLAG));
// and check for capabilities
println(out, "%nStore Capabilities");
for (String capability : S3A_DYNAMIC_CAPABILITIES) {
out.printf(CAPABILITY_FORMAT, capability,
fs.hasPathCapability(root, capability));
}
// the performance flags are dynamically generated
fs.createStoreContext().getPerformanceFlags().pathCapabilities()
.forEach(capability -> out.printf(CAPABILITY_FORMAT, capability, "true"));
// finish with a newline
println(out, "");
if (commands.getOpt(FIPS_FLAG) && !fs.hasPathCapability(root, FIPS_ENDPOINT)) {
throw badState("FIPS endpoint was required but the filesystem is not using it");
}
// and finally flush the output and report a success.
out.flush();
return SUCCESS;
}
/**
 * Validate the marker option.
 * The only retention policy now supported is "keep"; an empty/absent
 * value is accepted, as is the marker-awareness probe.
 * @param out output stream
 * @param marker desired marker option -may be null.
 */
private void processMarkerOption(final PrintStream out,
    final String marker) {
  println(out, "%nThis version of Hadoop always retains directory markers");
  final String policy = marker == null
      ? ""
      : marker.trim().toLowerCase(Locale.ROOT);
  if (policy.isEmpty() || DIRECTORY_MARKER_POLICY_KEEP.equals(policy)) {
    // nothing to do: marker retention is the only behavior offered.
    return;
  }
  if (MARKERS_AWARE.equals(policy)) {
    // simple awareness test: provides a way to validate compatibility
    // on the command line.
    println(out, IS_MARKER_AWARE);
    return;
  }
  throw badState("Unsupported Marker Policy \"%s\"", policy);
}
/**
 * Print a single configuration option as "description: key=value".
 * @param out destination stream
 * @param description human-readable label
 * @param key configuration key to look up
 * @param defVal default used when the key is unset
 * @return the trimmed value that was printed
 */
private String printOption(PrintStream out,
    String description, String key, String defVal) {
  final String value = getFilesystem().getConf().getTrimmed(key, defVal);
  println(out, "%s: %s=%s", description, key, value);
  return value;
}
}
/**
 * Command to list / abort pending multipart uploads.
 * <p>
 * Runs in one of three modes: list the uploads under a path, abort them,
 * or assert that an expected number exist ({@code -expect}).
 * Uploads may be filtered by age; aborting prompts for confirmation on
 * stdin unless {@code -force} is supplied.
 */
static final class Uploads extends S3GuardTool {

  /** Command name. */
  public static final String NAME = "uploads";

  /** Option: abort the selected uploads. */
  public static final String ABORT = "abort";

  /** Option: list the selected uploads. */
  public static final String LIST = "list";

  /** Option: require an exact upload count; exit with an error otherwise. */
  public static final String EXPECT = "expect";

  /** Option: skip the "are you sure" prompt when aborting. */
  public static final String FORCE = "force";

  public static final String PURPOSE = "list or abort pending " +
      "multipart uploads";

  private static final String USAGE = NAME + " [OPTIONS] " +
      "s3a://BUCKET[/path]\n"
      + "\t" + PURPOSE + "\n\n"
      + "Common options:\n"
      + " (-" + LIST + " | -" + EXPECT + " <num-uploads> | -" + ABORT
      + ") [-" + VERBOSE + "] "
      + "[<age-options>] [-force]\n"
      + "\t - Under given path, list or delete all uploads," +
      " or only those \n"
      + "older than specified by <age-options>\n"
      + "<age-options> are any combination of the integer-valued options:\n"
      + "\t" + AGE_OPTIONS_USAGE + "\n"
      + "-" + EXPECT + " is similar to list, except no output is printed,\n"
      + "\tbut the exit code will be an error if the provided number\n"
      + "\tis different that the number of uploads found by the command.\n"
      + "-" + FORCE + " option prevents the \"Are you sure\" prompt when\n"
      + "\tusing -" + ABORT;

  /** Constant used for output and parsed by tests. */
  public static final String TOTAL = "Total";

  /** Runs in one of three modes. */
  private enum Mode { LIST, EXPECT, ABORT }

  /** Selected mode; resolved during argument processing. */
  private Mode mode = null;

  /** For Mode == EXPECT, expected listing size. */
  private int expectedCount;

  /** List/abort uploads older than this many milliseconds; 0 = no filter. */
  private long ageMsec = 0;

  /** Verbose output flag. */
  private boolean verbose = false;

  /** Whether to delete with out "are you sure" prompt. */
  private boolean force = false;

  /** Path prefix to use when searching multipart uploads. */
  private String prefix;

  Uploads(Configuration conf) {
    super(conf, ABORT, LIST, VERBOSE, FORCE);
    addAgeOptions();
    getCommandFormat().addOptionWithValue(EXPECT);
  }

  @Override
  public String getName() {
    return NAME;
  }

  @Override
  public String getUsage() {
    return USAGE;
  }

  /**
   * Parse the arguments, then list/abort/count uploads as requested.
   * @param args command arguments
   * @param out output stream
   * @return exit code
   * @throws InterruptedException on interruption
   * @throws IOException on any failure
   */
  public int run(String[] args, PrintStream out)
      throws InterruptedException, IOException {
    List<String> paths = parseArgs(args);
    if (paths.isEmpty()) {
      errorln(getUsage());
      throw invalidArgs("No options specified");
    }
    processArgs(paths, out);
    println(out, "Listing uploads under path \"%s\"", prefix);
    promptBeforeAbort(out);
    processUploads(out);
    if (verbose) {
      dumpFileSystemStatistics(out);
    }
    out.flush();
    return SUCCESS;
  }

  /**
   * In abort mode without {@code -force}: ask for confirmation on stdin
   * and abort the command unless the user answers "yes".
   * @param out output stream for the prompt
   * @throws IOException declared for consistency with callers
   */
  private void promptBeforeAbort(PrintStream out) throws IOException {
    if (mode != Mode.ABORT || force) {
      return;
    }
    // deliberately not closed: closing the Scanner would close System.in.
    Scanner scanner = new Scanner(System.in, "UTF-8");
    out.println("Are you sure you want to delete any pending " +
        "uploads? (yes/no) >");
    String response = scanner.nextLine();
    if (!"yes".equalsIgnoreCase(response)) {
      throw S3GuardTool.userAborted("User did not answer yes, quitting.");
    }
  }

  /**
   * Iterate the uploads under the prefix, listing/aborting those old
   * enough, then verify the count in expect mode.
   * @param out output stream
   * @throws IOException on any failure
   */
  private void processUploads(PrintStream out) throws IOException {
    final S3AFileSystem fs = getFilesystem();
    RemoteIterator<MultipartUpload> uploads = fs.listUploads(prefix);
    // create a span so that the write operation helper
    // is within one
    AuditSpan span =
        fs.createSpan(MULTIPART_UPLOAD_ABORTED,
            prefix, null);
    final WriteOperationHelper writeOperationHelper
        = fs.getWriteOperationHelper();
    int count = 0;
    try {
      while (uploads.hasNext()) {
        MultipartUpload upload = uploads.next();
        if (!olderThan(upload, ageMsec)) {
          // too recent: skip.
          continue;
        }
        count++;
        if (mode == Mode.ABORT || mode == Mode.LIST || verbose) {
          println(out, "%s%s %s", mode == Mode.ABORT ? "Deleting: " : "",
              upload.key(), upload.uploadId());
        }
        if (mode == Mode.ABORT) {
          writeOperationHelper
              .abortMultipartUpload(upload.key(), upload.uploadId(),
                  true, LOG_EVENT);
        }
      }
    } finally {
      // FIX: always deactivate the span, even if listing/aborting failed,
      // so the audit context is not leaked into later operations.
      span.deactivate();
    }
    if (mode != Mode.EXPECT || verbose) {
      println(out, "%s %d uploads %s.", TOTAL, count,
          mode == Mode.ABORT ? "deleted" : "found");
    }
    if (mode == Mode.EXPECT) {
      if (count != expectedCount) {
        throw badState("Expected upload count under %s: %d, found %d",
            prefix, expectedCount, count);
      }
    }
  }

  /**
   * Check if upload is at least as old as given age.
   * @param u upload to check
   * @param msec age in milliseconds
   * @return true iff u was created at least age milliseconds ago.
   */
  private boolean olderThan(MultipartUpload u, long msec) {
    if (msec == 0) {
      // age filtering disabled: everything matches.
      return true;
    }
    Date ageDate = new Date(System.currentTimeMillis() - msec);
    return ageDate.compareTo(Date.from(u.initiated())) >= 0;
  }

  /**
   * Parse mode flags and the target path; enforce mutual exclusion of
   * -list/-abort/-expect; bind the filesystem.
   * @param args positional arguments; first entry is the s3a path
   * @param out output stream
   * @throws IOException on filesystem binding failure
   */
  protected void processArgs(List<String> args, PrintStream out)
      throws IOException {
    CommandFormat commands = getCommandFormat();
    String err = "Can only specify one of -" + LIST + ", " +
        " -" + ABORT + ", and " + EXPECT;

    // Three mutually-exclusive options
    if (commands.getOpt(LIST)) {
      mode = Mode.LIST;
    }
    if (commands.getOpt(ABORT)) {
      if (mode != null) {
        throw invalidArgs(err);
      }
      mode = Mode.ABORT;
    }
    String expectVal = commands.getOptValue(EXPECT);
    if (expectVal != null) {
      if (mode != null) {
        throw invalidArgs(err);
      }
      mode = Mode.EXPECT;
      // a non-numeric value raises NumberFormatException, which propagates
      // to the generic error handler in main().
      expectedCount = Integer.parseInt(expectVal);
    }

    // Default to list
    if (mode == null) {
      vprintln(out, "No mode specified, defaulting to -" + LIST);
      mode = Mode.LIST;
    }

    // Other flags
    if (commands.getOpt(VERBOSE)) {
      verbose = true;
    }
    if (commands.getOpt(FORCE)) {
      force = true;
    }
    ageMsec = ageOptionsToMsec();

    String s3Path = args.get(0);
    URI uri = S3GuardTool.toUri(s3Path);
    prefix = uri.getPath();
    if (prefix.length() > 0) {
      // strip the leading "/" so the prefix matches S3 object keys.
      prefix = prefix.substring(1);
    }
    vprintln(out, "Command: %s, age %d msec, path %s (prefix \"%s\")",
        mode.name(), ageMsec, s3Path, prefix);

    initS3AFileSystem(s3Path);
  }

  /**
   * If verbose flag is set, print a formatted string followed by a newline
   * to the output stream.
   * @param out destination
   * @param format format string
   * @param args optional arguments
   */
  private void vprintln(PrintStream out, String format, Object...
      args) {
    if (verbose) {
      out.println(String.format(format, args));
    }
  }
}
private static S3GuardTool command;
/**
 * Convert a path to a URI, catching any {@code URISyntaxException}
 * and converting to an invalid args exception.
 * @param s3Path path to convert to a URI
 * @return a URI of the path
 * @throws ExitUtil.ExitException INVALID_ARGUMENT if the URI is invalid
 */
protected static URI toUri(String s3Path) {
  try {
    return new URI(s3Path);
  } catch (URISyntaxException e) {
    // note: invalidArgs() cannot carry a cause, so e is dropped here.
    throw invalidArgs("Not a valid filesystem path: %s", s3Path);
  }
}
/**
 * Print usage to stderr: the active command's usage when one has been
 * selected, otherwise the generic tool usage; always followed by the
 * common options.
 */
private static void printHelp() {
  final S3GuardTool active = command;
  if (active != null) {
    errorln("Usage: hadoop " + ENTRY_POINT + active.getUsage());
  } else {
    errorln("Usage: hadoop " + USAGE);
    errorln("\tperform S3A connector administrative commands.");
  }
  errorln();
  errorln(COMMON_USAGE);
}
/** Print an empty line to stderr. */
protected static void errorln() {
  System.err.println();
}
/**
 * Print a line of text to stderr.
 * @param x text to print
 */
protected static void errorln(String x) {
  System.err.println(x);
}
/**
 * Print a formatted string followed by a newline to the output stream.
 * @param out destination
 * @param format format string
 * @param args optional arguments
 */
protected static void println(PrintStream out,
    String format,
    Object... args) {
  // expand the format first, then emit it as a single line.
  final String line = String.format(format, args);
  out.println(line);
}
/**
 * Convert a {@code FileNotFoundException} into an exit exception
 * carrying the "not found" exit code.
 * @param e exception
 * @return a new exception to throw; the original is retained as the cause
 */
protected static ExitUtil.ExitException notFound(
    FileNotFoundException e) {
  return new ExitUtil.ExitException(E_NOT_FOUND, e.toString(), e);
}
/**
 * Build the exception to raise on invalid arguments.
 * @param format string format
 * @param args optional arguments for the string
 * @return a new exception carrying the INVALID_ARGUMENT exit code
 */
protected static ExitUtil.ExitException invalidArgs(
    String format, Object... args) {
  // delegate to the shared factory with the argument-error exit code.
  return exitException(INVALID_ARGUMENT, format, args);
}
/**
 * Build the exception to raise on a bad store/bucket state.
 * @param format string format
 * @param args optional arguments for the string
 * @return a new exception carrying the E_BAD_STATE exit code
 */
protected static ExitUtil.ExitException badState(
    String format, Object... args) {
  // delegate to the shared factory with the bad-state exit code.
  return exitException(E_BAD_STATE, format, args);
}
/**
 * Create an exception declaring S3Guard is unsupported.
 * Fixed: javadoc typos ("Crate", "an exception raise"); the method now
 * returns the exception instead of throwing it internally, matching the
 * sibling factories so callers' {@code throw s3guardUnsupported();}
 * statement is actually what raises it. Observable behavior is unchanged.
 * @return an exception to raise.
 */
protected static ExitUtil.ExitException s3guardUnsupported() {
  return exitException(E_S3GUARD_UNSUPPORTED, E_UNSUPPORTED);
}
/**
 * Build the exception to raise on a user-aborted action.
 * @param format string format
 * @param args optional arguments for the string
 * @return a new exception carrying the generic ERROR exit code
 */
protected static ExitUtil.ExitException userAborted(
    String format, Object... args) {
  // delegate to the shared factory with the generic error exit code.
  return exitException(ERROR, format, args);
}
/**
 * Build an exception to throw with a formatted message.
 * @param exitCode exit code to use
 * @param format string format
 * @param args optional arguments for the string
 * @return a new exception to throw
 */
protected static ExitUtil.ExitException exitException(
    final int exitCode,
    final String format,
    final Object... args) {
  final String message = String.format(format, args);
  return new ExitUtil.ExitException(exitCode, message);
}
/**
 * Execute the command with the given arguments.
 * Parses generic Hadoop options, resolves the subcommand name to a
 * command instance (stored in the static {@code command} field so
 * {@link #printHelp()} can show its usage), then runs it via ToolRunner.
 *
 * @param conf Hadoop configuration.
 * @param args command specific arguments.
 * @return exit code.
 * @throws Exception on I/O errors.
 */
public static int run(Configuration conf, String... args) throws
    Exception {
  /* ToolRunner.run does this too, but we must do it before looking at
  subCommand or instantiating the cmd object below */
  String[] otherArgs = new GenericOptionsParser(conf, args)
      .getRemainingArgs();
  if (otherArgs.length == 0) {
    printHelp();
    throw new ExitUtil.ExitException(E_USAGE, "No arguments provided");
  }
  final String subCommand = otherArgs[0];
  LOG.debug("Executing command {}", subCommand);
  // if it is no longer supported: raise an exception
  if (UNSUPPORTED_COMMANDS.contains(subCommand)) {
    throw s3guardUnsupported();
  }
  // map the subcommand name to a command instance.
  switch (subCommand) {
  case BucketInfo.NAME:
    command = new BucketInfo(conf);
    break;
  case BucketTool.NAME:
    command = new BucketTool(conf);
    break;
  case MarkerTool.MARKERS:
    command = new MarkerTool(conf);
    break;
  case Uploads.NAME:
    command = new Uploads(conf);
    break;
  case SelectConstants.NAME:
    // S3 Select is no longer supported here; fail with a version error.
    throw new ExitUtil.ExitException(
        EXIT_UNSUPPORTED_VERSION, SELECT_UNSUPPORTED);
  default:
    printHelp();
    throw new ExitUtil.ExitException(E_USAGE,
        "Unknown command " + subCommand);
  }
  try {
    return ToolRunner.run(conf, command, otherArgs);
  } finally {
    // clean up the command (and whatever it holds) even on failure.
    IOUtils.cleanupWithLogger(LOG, command);
  }
}
/**
 * Main entry point. Calls {@code System.exit()} on all execution paths.
 * Maps each failure category to a specific exit code; ordering of the
 * catch clauses matters (most specific first).
 * @param args argument list
 */
public static void main(String[] args) {
  try {
    int ret = run(new Configuration(), args);
    exit(ret, "");
  } catch (CommandFormat.UnknownOptionException e) {
    // unknown CLI option: report it and show usage.
    errorln(e.getMessage());
    printHelp();
    exit(E_USAGE, e.getMessage());
  } catch (ExitUtil.ExitException e) {
    // explicitly raised exit code
    LOG.debug("Exception raised", e);
    exit(e.getExitCode(), e.toString());
  } catch (FileNotFoundException e) {
    // Bucket doesn't exist or similar - return code of 44, "404".
    errorln(e.toString());
    LOG.debug("Not found:", e);
    exit(EXIT_NOT_FOUND, e.toString());
  } catch (Throwable e) {
    if (e instanceof ExitCodeProvider) {
      // this exception provides its own exit code
      final ExitCodeProvider ec = (ExitCodeProvider) e;
      LOG.debug("Exception raised", e);
      exit(ec.getExitCode(), e.toString());
    } else {
      // unknown failure: dump the stack and use the generic ERROR code.
      e.printStackTrace(System.err);
      exit(ERROR, e.toString());
    }
  }
}
/**
 * Exit the process via {@code ExitUtil.terminate} with the given status.
 * @param status process exit status
 * @param text message text
 */
protected static void exit(int status, String text) {
  ExitUtil.terminate(status, text);
}
}
|
googleapis/google-cloud-java | 36,083 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/UpdateAccountRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Request message for UpdateAccount RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateAccountRequest}
*/
public final class UpdateAccountRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.UpdateAccountRequest)
UpdateAccountRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateAccountRequest.newBuilder() to construct.
private UpdateAccountRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateAccountRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateAccountRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateAccountRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateAccountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateAccountRequest.class,
com.google.analytics.admin.v1alpha.UpdateAccountRequest.Builder.class);
}
private int bitField0_;
public static final int ACCOUNT_FIELD_NUMBER = 1;
private com.google.analytics.admin.v1alpha.Account account_;
/**
*
*
* <pre>
* Required. The account to update.
* The account's `name` field is used to identify the account.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the account field is set.
*/
@java.lang.Override
public boolean hasAccount() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The account to update.
* The account's `name` field is used to identify the account.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The account.
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.Account getAccount() {
return account_ == null
? com.google.analytics.admin.v1alpha.Account.getDefaultInstance()
: account_;
}
/**
*
*
* <pre>
* Required. The account to update.
* The account's `name` field is used to identify the account.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.AccountOrBuilder getAccountOrBuilder() {
return account_ == null
? com.google.analytics.admin.v1alpha.Account.getDefaultInstance()
: account_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (for example, "field_to_update"). Omitted fields will not be updated.
* To replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (for example, "field_to_update"). Omitted fields will not be updated.
* To replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (for example, "field_to_update"). Omitted fields will not be updated.
* To replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Emit only fields whose presence bit is set: bit 0 -> account (field 1),
  // bit 1 -> update_mask (field 2); then any unknown fields kept from parsing.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getAccount());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getUpdateMask());
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Return the memoized size if already computed (-1 means "not yet").
  int size = memoizedSize;
  if (size != -1) return size;

  // Sum the encoded size of each present field plus unknown fields,
  // mirroring the presence checks in writeTo().
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAccount());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
  }
  size += getUnknownFields().getSerializedSize();
  // Cache for subsequent calls.
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Objects of other types are delegated to the superclass comparison.
  if (!(obj instanceof com.google.analytics.admin.v1alpha.UpdateAccountRequest)) {
    return super.equals(obj);
  }
  com.google.analytics.admin.v1alpha.UpdateAccountRequest other =
      (com.google.analytics.admin.v1alpha.UpdateAccountRequest) obj;

  // For each field: presence must match, and when present, values must match.
  if (hasAccount() != other.hasAccount()) return false;
  if (hasAccount()) {
    if (!getAccount().equals(other.getAccount())) return false;
  }
  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask().equals(other.getUpdateMask())) return false;
  }
  // Unknown fields participate in equality as well.
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Mix in each present field: its field number, then its value's hash —
  // consistent with the presence-sensitive equals().
  if (hasAccount()) {
    hash = (37 * hash) + ACCOUNT_FIELD_NUMBER;
    hash = (53 * hash) + getAccount().hashCode();
  }
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateAccountRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.UpdateAccountRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateAccount RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateAccountRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.UpdateAccountRequest)
com.google.analytics.admin.v1alpha.UpdateAccountRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateAccountRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateAccountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateAccountRequest.class,
com.google.analytics.admin.v1alpha.UpdateAccountRequest.Builder.class);
}
// Construct using com.google.analytics.admin.v1alpha.UpdateAccountRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getAccountFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
  super.clear();
  // Reset presence bits and drop both fields, disposing any nested
  // builders so later set/merge calls start from a clean state.
  bitField0_ = 0;
  account_ = null;
  if (accountBuilder_ != null) {
    accountBuilder_.dispose();
    accountBuilder_ = null;
  }
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateAccountRequest_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateAccountRequest getDefaultInstanceForType() {
return com.google.analytics.admin.v1alpha.UpdateAccountRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateAccountRequest build() {
com.google.analytics.admin.v1alpha.UpdateAccountRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateAccountRequest buildPartial() {
com.google.analytics.admin.v1alpha.UpdateAccountRequest result =
new com.google.analytics.admin.v1alpha.UpdateAccountRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copy every field whose presence bit is set in this builder into the
// result message, preferring a nested builder's built value when one
// exists, and propagate the presence bits into the result.
private void buildPartial0(com.google.analytics.admin.v1alpha.UpdateAccountRequest result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.account_ = accountBuilder_ == null ? account_ : accountBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    // Dynamic merge entry point: routes to the strongly-typed overload when
    // the argument is an UpdateAccountRequest, otherwise falls back to
    // reflective field-by-field merging in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.admin.v1alpha.UpdateAccountRequest) {
        return mergeFrom((com.google.analytics.admin.v1alpha.UpdateAccountRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges only the fields that are present on |other| (unset fields on
    // |other| never clobber this builder); unknown fields are carried over.
    // Merging the default instance is a no-op.
    public Builder mergeFrom(com.google.analytics.admin.v1alpha.UpdateAccountRequest other) {
      if (other == com.google.analytics.admin.v1alpha.UpdateAccountRequest.getDefaultInstance())
        return this;
      if (other.hasAccount()) {
        mergeAccount(other.getAccount());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // proto3 messages have no required fields at the wire level, so a builder
    // is always considered initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parser: reads tag/value pairs until end of stream (tag 0)
    // or an end-group tag, dispatching known field tags to their sub-builders
    // and preserving everything else as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              { // field 1 (account), wire type 2 (length-delimited message)
                input.readMessage(getAccountFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              { // field 2 (update_mask), wire type 2 (length-delimited message)
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always mark the builder dirty, even on a partial parse.
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = account, 0x2 = update_mask.
    private int bitField0_;
    // Backing message for field 1; ignored once accountBuilder_ exists.
    private com.google.analytics.admin.v1alpha.Account account_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.analytics.admin.v1alpha.Account,
            com.google.analytics.admin.v1alpha.Account.Builder,
            com.google.analytics.admin.v1alpha.AccountOrBuilder>
        accountBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the account field is set.
     */
    public boolean hasAccount() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The account.
     */
    public com.google.analytics.admin.v1alpha.Account getAccount() {
      if (accountBuilder_ == null) {
        return account_ == null
            ? com.google.analytics.admin.v1alpha.Account.getDefaultInstance()
            : account_;
      } else {
        return accountBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAccount(com.google.analytics.admin.v1alpha.Account value) {
      if (accountBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        account_ = value;
      } else {
        accountBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAccount(com.google.analytics.admin.v1alpha.Account.Builder builderForValue) {
      if (accountBuilder_ == null) {
        account_ = builderForValue.build();
      } else {
        accountBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeAccount(com.google.analytics.admin.v1alpha.Account value) {
      if (accountBuilder_ == null) {
        // Merge into the existing value only when one is already present and
        // non-default; otherwise adopt |value| wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && account_ != null
            && account_ != com.google.analytics.admin.v1alpha.Account.getDefaultInstance()) {
          getAccountBuilder().mergeFrom(value);
        } else {
          account_ = value;
        }
      } else {
        accountBuilder_.mergeFrom(value);
      }
      if (account_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearAccount() {
      bitField0_ = (bitField0_ & ~0x00000001);
      account_ = null;
      if (accountBuilder_ != null) {
        accountBuilder_.dispose();
        accountBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.analytics.admin.v1alpha.Account.Builder getAccountBuilder() {
      // Handing out a mutable builder implies the field is now "set".
      bitField0_ |= 0x00000001;
      onChanged();
      return getAccountFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.analytics.admin.v1alpha.AccountOrBuilder getAccountOrBuilder() {
      if (accountBuilder_ != null) {
        return accountBuilder_.getMessageOrBuilder();
      } else {
        return account_ == null
            ? com.google.analytics.admin.v1alpha.Account.getDefaultInstance()
            : account_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The account to update.
     * The account's `name` field is used to identify the account.
     * </pre>
     *
     * <code>
     * .google.analytics.admin.v1alpha.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.analytics.admin.v1alpha.Account,
            com.google.analytics.admin.v1alpha.Account.Builder,
            com.google.analytics.admin.v1alpha.AccountOrBuilder>
        getAccountFieldBuilder() {
      // Lazily created; once built, account_ is nulled and the builder owns
      // the field's state.
      if (accountBuilder_ == null) {
        accountBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.analytics.admin.v1alpha.Account,
                com.google.analytics.admin.v1alpha.Account.Builder,
                com.google.analytics.admin.v1alpha.AccountOrBuilder>(
                getAccount(), getParentForChildren(), isClean());
        account_ = null;
      }
      return accountBuilder_;
    }
    // Backing message for field 2; ignored once updateMaskBuilder_ exists.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Same merge-vs-replace rule as mergeAccount above.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out a mutable builder implies the field is now "set".
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Field names must be in snake
     * case (for example, "field_to_update"). Omitted fields will not be updated.
     * To replace the entire entity, use one path with the string "*" to match all
     * fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily created; once built, updateMask_ is nulled and the builder owns
      // the field's state.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Straight delegation to the superclass; emitted for covariant return types.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.UpdateAccountRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.UpdateAccountRequest)
  // Shared immutable singleton returned by getDefaultInstance() and
  // getDefaultInstanceForType(); all fields at their default values.
  private static final com.google.analytics.admin.v1alpha.UpdateAccountRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.UpdateAccountRequest();
  }
  public static com.google.analytics.admin.v1alpha.UpdateAccountRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless singleton parser. On any parse failure the partially-built
  // message is attached to the thrown InvalidProtocolBufferException so
  // callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<UpdateAccountRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateAccountRequest>() {
        @java.lang.Override
        public UpdateAccountRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateAccountRequest> parser() {
    return PARSER;
  }
  // Instance-level accessors for the shared PARSER / DEFAULT_INSTANCE singletons.
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateAccountRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.UpdateAccountRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,179 | java-container/google-cloud-container/src/test/java/com/google/cloud/container/v1beta1/MockClusterManagerImpl.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.container.v1beta1;
import com.google.api.core.BetaApi;
import com.google.container.v1beta1.CancelOperationRequest;
import com.google.container.v1beta1.CheckAutopilotCompatibilityRequest;
import com.google.container.v1beta1.CheckAutopilotCompatibilityResponse;
import com.google.container.v1beta1.Cluster;
import com.google.container.v1beta1.ClusterManagerGrpc.ClusterManagerImplBase;
import com.google.container.v1beta1.ClusterUpgradeInfo;
import com.google.container.v1beta1.CompleteIPRotationRequest;
import com.google.container.v1beta1.CompleteNodePoolUpgradeRequest;
import com.google.container.v1beta1.CreateClusterRequest;
import com.google.container.v1beta1.CreateNodePoolRequest;
import com.google.container.v1beta1.DeleteClusterRequest;
import com.google.container.v1beta1.DeleteNodePoolRequest;
import com.google.container.v1beta1.FetchClusterUpgradeInfoRequest;
import com.google.container.v1beta1.FetchNodePoolUpgradeInfoRequest;
import com.google.container.v1beta1.GetClusterRequest;
import com.google.container.v1beta1.GetJSONWebKeysRequest;
import com.google.container.v1beta1.GetJSONWebKeysResponse;
import com.google.container.v1beta1.GetNodePoolRequest;
import com.google.container.v1beta1.GetOperationRequest;
import com.google.container.v1beta1.GetServerConfigRequest;
import com.google.container.v1beta1.ListClustersRequest;
import com.google.container.v1beta1.ListClustersResponse;
import com.google.container.v1beta1.ListLocationsRequest;
import com.google.container.v1beta1.ListLocationsResponse;
import com.google.container.v1beta1.ListNodePoolsRequest;
import com.google.container.v1beta1.ListNodePoolsResponse;
import com.google.container.v1beta1.ListOperationsRequest;
import com.google.container.v1beta1.ListOperationsResponse;
import com.google.container.v1beta1.ListUsableSubnetworksRequest;
import com.google.container.v1beta1.ListUsableSubnetworksResponse;
import com.google.container.v1beta1.NodePool;
import com.google.container.v1beta1.NodePoolUpgradeInfo;
import com.google.container.v1beta1.Operation;
import com.google.container.v1beta1.RollbackNodePoolUpgradeRequest;
import com.google.container.v1beta1.ServerConfig;
import com.google.container.v1beta1.SetAddonsConfigRequest;
import com.google.container.v1beta1.SetLabelsRequest;
import com.google.container.v1beta1.SetLegacyAbacRequest;
import com.google.container.v1beta1.SetLocationsRequest;
import com.google.container.v1beta1.SetLoggingServiceRequest;
import com.google.container.v1beta1.SetMaintenancePolicyRequest;
import com.google.container.v1beta1.SetMasterAuthRequest;
import com.google.container.v1beta1.SetMonitoringServiceRequest;
import com.google.container.v1beta1.SetNetworkPolicyRequest;
import com.google.container.v1beta1.SetNodePoolAutoscalingRequest;
import com.google.container.v1beta1.SetNodePoolManagementRequest;
import com.google.container.v1beta1.SetNodePoolSizeRequest;
import com.google.container.v1beta1.StartIPRotationRequest;
import com.google.container.v1beta1.UpdateClusterRequest;
import com.google.container.v1beta1.UpdateMasterRequest;
import com.google.container.v1beta1.UpdateNodePoolRequest;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Empty;
import io.grpc.stub.StreamObserver;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import javax.annotation.Generated;
@BetaApi
@Generated("by gapic-generator-java")
public class MockClusterManagerImpl extends ClusterManagerImplBase {
  // Requests received so far, in call order; tests inspect these to verify
  // what the client sent.
  private List<AbstractMessage> requests;
  // FIFO queue of canned results. Each element is either a response message
  // (replayed via onNext) or an Exception (delivered via onError).
  private Queue<Object> responses;
  public MockClusterManagerImpl() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }
  public List<AbstractMessage> getRequests() {
    return requests;
  }
  // Enqueues one canned success response.
  public void addResponse(AbstractMessage response) {
    responses.add(response);
  }
  // Replaces the whole response queue with the given responses, in order.
  public void setResponses(List<AbstractMessage> responses) {
    this.responses = new LinkedList<Object>(responses);
  }
  // Enqueues an exception to be raised instead of a response.
  public void addException(Exception exception) {
    responses.add(exception);
  }
  // Clears all recorded requests and pending responses.
  public void reset() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }
@Override
public void listClusters(
ListClustersRequest request, StreamObserver<ListClustersResponse> responseObserver) {
Object response = responses.poll();
if (response instanceof ListClustersResponse) {
requests.add(request);
responseObserver.onNext(((ListClustersResponse) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method ListClusters, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
ListClustersResponse.class.getName(),
Exception.class.getName())));
}
}
@Override
public void getCluster(GetClusterRequest request, StreamObserver<Cluster> responseObserver) {
Object response = responses.poll();
if (response instanceof Cluster) {
requests.add(request);
responseObserver.onNext(((Cluster) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method GetCluster, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Cluster.class.getName(),
Exception.class.getName())));
}
}
@Override
public void createCluster(
CreateClusterRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method CreateCluster, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void updateCluster(
UpdateClusterRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method UpdateCluster, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void updateNodePool(
UpdateNodePoolRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method UpdateNodePool, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setNodePoolAutoscaling(
SetNodePoolAutoscalingRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetNodePoolAutoscaling, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setLoggingService(
SetLoggingServiceRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetLoggingService, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setMonitoringService(
SetMonitoringServiceRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetMonitoringService, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setAddonsConfig(
SetAddonsConfigRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetAddonsConfig, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setLocations(
SetLocationsRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetLocations, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void updateMaster(
UpdateMasterRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method UpdateMaster, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setMasterAuth(
SetMasterAuthRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetMasterAuth, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void deleteCluster(
DeleteClusterRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method DeleteCluster, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void listOperations(
ListOperationsRequest request, StreamObserver<ListOperationsResponse> responseObserver) {
Object response = responses.poll();
if (response instanceof ListOperationsResponse) {
requests.add(request);
responseObserver.onNext(((ListOperationsResponse) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method ListOperations, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
ListOperationsResponse.class.getName(),
Exception.class.getName())));
}
}
@Override
public void getOperation(
GetOperationRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method GetOperation, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void cancelOperation(
CancelOperationRequest request, StreamObserver<Empty> responseObserver) {
Object response = responses.poll();
if (response instanceof Empty) {
requests.add(request);
responseObserver.onNext(((Empty) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method CancelOperation, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Empty.class.getName(),
Exception.class.getName())));
}
}
@Override
public void getServerConfig(
GetServerConfigRequest request, StreamObserver<ServerConfig> responseObserver) {
Object response = responses.poll();
if (response instanceof ServerConfig) {
requests.add(request);
responseObserver.onNext(((ServerConfig) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method GetServerConfig, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
ServerConfig.class.getName(),
Exception.class.getName())));
}
}
@Override
public void getJSONWebKeys(
GetJSONWebKeysRequest request, StreamObserver<GetJSONWebKeysResponse> responseObserver) {
Object response = responses.poll();
if (response instanceof GetJSONWebKeysResponse) {
requests.add(request);
responseObserver.onNext(((GetJSONWebKeysResponse) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method GetJSONWebKeys, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
GetJSONWebKeysResponse.class.getName(),
Exception.class.getName())));
}
}
@Override
public void listNodePools(
ListNodePoolsRequest request, StreamObserver<ListNodePoolsResponse> responseObserver) {
Object response = responses.poll();
if (response instanceof ListNodePoolsResponse) {
requests.add(request);
responseObserver.onNext(((ListNodePoolsResponse) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method ListNodePools, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
ListNodePoolsResponse.class.getName(),
Exception.class.getName())));
}
}
@Override
public void getNodePool(GetNodePoolRequest request, StreamObserver<NodePool> responseObserver) {
Object response = responses.poll();
if (response instanceof NodePool) {
requests.add(request);
responseObserver.onNext(((NodePool) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method GetNodePool, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
NodePool.class.getName(),
Exception.class.getName())));
}
}
@Override
public void createNodePool(
CreateNodePoolRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method CreateNodePool, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void deleteNodePool(
DeleteNodePoolRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method DeleteNodePool, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void completeNodePoolUpgrade(
CompleteNodePoolUpgradeRequest request, StreamObserver<Empty> responseObserver) {
Object response = responses.poll();
if (response instanceof Empty) {
requests.add(request);
responseObserver.onNext(((Empty) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method CompleteNodePoolUpgrade, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
Empty.class.getName(),
Exception.class.getName())));
}
}
@Override
public void rollbackNodePoolUpgrade(
RollbackNodePoolUpgradeRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method RollbackNodePoolUpgrade, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setNodePoolManagement(
SetNodePoolManagementRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetNodePoolManagement, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setLabels(SetLabelsRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetLabels, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setLegacyAbac(
SetLegacyAbacRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetLegacyAbac, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void startIPRotation(
StartIPRotationRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method StartIPRotation, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void completeIPRotation(
CompleteIPRotationRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method CompleteIPRotation, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setNodePoolSize(
SetNodePoolSizeRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetNodePoolSize, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setNetworkPolicy(
SetNetworkPolicyRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetNetworkPolicy, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void setMaintenancePolicy(
SetMaintenancePolicyRequest request, StreamObserver<Operation> responseObserver) {
Object response = responses.poll();
if (response instanceof Operation) {
requests.add(request);
responseObserver.onNext(((Operation) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method SetMaintenancePolicy, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
Operation.class.getName(),
Exception.class.getName())));
}
}
@Override
public void listUsableSubnetworks(
ListUsableSubnetworksRequest request,
StreamObserver<ListUsableSubnetworksResponse> responseObserver) {
Object response = responses.poll();
if (response instanceof ListUsableSubnetworksResponse) {
requests.add(request);
responseObserver.onNext(((ListUsableSubnetworksResponse) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method ListUsableSubnetworks, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
ListUsableSubnetworksResponse.class.getName(),
Exception.class.getName())));
}
}
@Override
public void checkAutopilotCompatibility(
CheckAutopilotCompatibilityRequest request,
StreamObserver<CheckAutopilotCompatibilityResponse> responseObserver) {
Object response = responses.poll();
if (response instanceof CheckAutopilotCompatibilityResponse) {
requests.add(request);
responseObserver.onNext(((CheckAutopilotCompatibilityResponse) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method CheckAutopilotCompatibility, expected"
+ " %s or %s",
response == null ? "null" : response.getClass().getName(),
CheckAutopilotCompatibilityResponse.class.getName(),
Exception.class.getName())));
}
}
@Override
public void listLocations(
ListLocationsRequest request, StreamObserver<ListLocationsResponse> responseObserver) {
Object response = responses.poll();
if (response instanceof ListLocationsResponse) {
requests.add(request);
responseObserver.onNext(((ListLocationsResponse) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method ListLocations, expected %s or %s",
response == null ? "null" : response.getClass().getName(),
ListLocationsResponse.class.getName(),
Exception.class.getName())));
}
}
@Override
public void fetchClusterUpgradeInfo(
FetchClusterUpgradeInfoRequest request, StreamObserver<ClusterUpgradeInfo> responseObserver) {
Object response = responses.poll();
if (response instanceof ClusterUpgradeInfo) {
requests.add(request);
responseObserver.onNext(((ClusterUpgradeInfo) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method FetchClusterUpgradeInfo, expected %s or"
+ " %s",
response == null ? "null" : response.getClass().getName(),
ClusterUpgradeInfo.class.getName(),
Exception.class.getName())));
}
}
@Override
public void fetchNodePoolUpgradeInfo(
FetchNodePoolUpgradeInfoRequest request,
StreamObserver<NodePoolUpgradeInfo> responseObserver) {
Object response = responses.poll();
if (response instanceof NodePoolUpgradeInfo) {
requests.add(request);
responseObserver.onNext(((NodePoolUpgradeInfo) response));
responseObserver.onCompleted();
} else if (response instanceof Exception) {
responseObserver.onError(((Exception) response));
} else {
responseObserver.onError(
new IllegalArgumentException(
String.format(
"Unrecognized response type %s for method FetchNodePoolUpgradeInfo, expected %s"
+ " or %s",
response == null ? "null" : response.getClass().getName(),
NodePoolUpgradeInfo.class.getName(),
Exception.class.getName())));
}
}
}
|
googleapis/google-cloud-java | 36,337 | java-bigqueryconnection/google-cloud-bigqueryconnection/src/main/java/com/google/cloud/bigquery/connection/v1beta1/stub/HttpJsonConnectionServiceStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigquery.connection.v1beta1.stub;
import com.google.api.core.BetaApi;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.bigquery.connection.v1beta1.ConnectionProto;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the ConnectionService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class HttpJsonConnectionServiceStub extends ConnectionServiceStub {
  // Shared (empty) protobuf type registry handed to every response parser below.
  private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().build();
  // CreateConnection: POST /v1beta1/{parent}/connections with the connection as the JSON body
  // and connection_id as a query parameter.
  private static final ApiMethodDescriptor<
          ConnectionProto.CreateConnectionRequest, ConnectionProto.Connection>
      createConnectionMethodDescriptor =
          ApiMethodDescriptor
              .<ConnectionProto.CreateConnectionRequest, ConnectionProto.Connection>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/CreateConnection")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ConnectionProto.CreateConnectionRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{parent=projects/*/locations/*}/connections",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.CreateConnectionRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.CreateConnectionRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "connectionId", request.getConnectionId());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("connection", request.getConnection(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ConnectionProto.Connection>newBuilder()
                      .setDefaultInstance(ConnectionProto.Connection.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // GetConnection: GET /v1beta1/{name}; no request body.
  private static final ApiMethodDescriptor<
          ConnectionProto.GetConnectionRequest, ConnectionProto.Connection>
      getConnectionMethodDescriptor =
          ApiMethodDescriptor
              .<ConnectionProto.GetConnectionRequest, ConnectionProto.Connection>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/GetConnection")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ConnectionProto.GetConnectionRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{name=projects/*/locations/*/connections/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.GetConnectionRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.GetConnectionRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ConnectionProto.Connection>newBuilder()
                      .setDefaultInstance(ConnectionProto.Connection.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // ListConnections: GET /v1beta1/{parent}/connections with maxResults/pageToken as query
  // parameters; no request body.
  private static final ApiMethodDescriptor<
          ConnectionProto.ListConnectionsRequest, ConnectionProto.ListConnectionsResponse>
      listConnectionsMethodDescriptor =
          ApiMethodDescriptor
              .<ConnectionProto.ListConnectionsRequest, ConnectionProto.ListConnectionsResponse>
                  newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/ListConnections")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ConnectionProto.ListConnectionsRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{parent=projects/*/locations/*}/connections",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.ListConnectionsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.ListConnectionsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "maxResults", request.getMaxResults());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ConnectionProto.ListConnectionsResponse>newBuilder()
                      .setDefaultInstance(
                          ConnectionProto.ListConnectionsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // UpdateConnection: PATCH /v1beta1/{name} with the connection as the JSON body and updateMask
  // as a query parameter.
  private static final ApiMethodDescriptor<
          ConnectionProto.UpdateConnectionRequest, ConnectionProto.Connection>
      updateConnectionMethodDescriptor =
          ApiMethodDescriptor
              .<ConnectionProto.UpdateConnectionRequest, ConnectionProto.Connection>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/UpdateConnection")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ConnectionProto.UpdateConnectionRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{name=projects/*/locations/*/connections/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.UpdateConnectionRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.UpdateConnectionRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("connection", request.getConnection(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ConnectionProto.Connection>newBuilder()
                      .setDefaultInstance(ConnectionProto.Connection.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // UpdateConnectionCredential: PATCH /v1beta1/{name}/credential with the credential as the JSON
  // body; returns Empty.
  private static final ApiMethodDescriptor<ConnectionProto.UpdateConnectionCredentialRequest, Empty>
      updateConnectionCredentialMethodDescriptor =
          ApiMethodDescriptor.<ConnectionProto.UpdateConnectionCredentialRequest, Empty>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/UpdateConnectionCredential")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter
                      .<ConnectionProto.UpdateConnectionCredentialRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{name=projects/*/locations/*/connections/*/credential}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.UpdateConnectionCredentialRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.UpdateConnectionCredentialRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("credential", request.getCredential(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Empty>newBuilder()
                      .setDefaultInstance(Empty.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // DeleteConnection: DELETE /v1beta1/{name}; no request body; returns Empty.
  private static final ApiMethodDescriptor<ConnectionProto.DeleteConnectionRequest, Empty>
      deleteConnectionMethodDescriptor =
          ApiMethodDescriptor.<ConnectionProto.DeleteConnectionRequest, Empty>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/DeleteConnection")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ConnectionProto.DeleteConnectionRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{name=projects/*/locations/*/connections/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.DeleteConnectionRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ConnectionProto.DeleteConnectionRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Empty>newBuilder()
                      .setDefaultInstance(Empty.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // GetIamPolicy: POST /v1beta1/{resource}:getIamPolicy; the resource field is moved into the
  // URL path and the remainder of the request becomes the JSON body.
  private static final ApiMethodDescriptor<GetIamPolicyRequest, Policy>
      getIamPolicyMethodDescriptor =
          ApiMethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/GetIamPolicy")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetIamPolicyRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{resource=projects/*/locations/*/connections/*}:getIamPolicy",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "resource", request.getResource());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearResource().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Policy>newBuilder()
                      .setDefaultInstance(Policy.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // SetIamPolicy: POST /v1beta1/{resource}:setIamPolicy; the resource field is moved into the
  // URL path and the remainder of the request becomes the JSON body.
  private static final ApiMethodDescriptor<SetIamPolicyRequest, Policy>
      setIamPolicyMethodDescriptor =
          ApiMethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/SetIamPolicy")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<SetIamPolicyRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{resource=projects/*/locations/*/connections/*}:setIamPolicy",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<SetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "resource", request.getResource());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<SetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearResource().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Policy>newBuilder()
                      .setDefaultInstance(Policy.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // TestIamPermissions: POST /v1beta1/{resource}:testIamPermissions; the resource field is moved
  // into the URL path and the remainder of the request becomes the JSON body.
  private static final ApiMethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          ApiMethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setFullMethodName(
                  "google.cloud.bigquery.connection.v1beta1.ConnectionService/TestIamPermissions")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<TestIamPermissionsRequest>newBuilder()
                      .setPath(
                          "/v1beta1/{resource=projects/*/locations/*/connections/*}:testIamPermissions",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "resource", request.getResource());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearResource().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<TestIamPermissionsResponse>newBuilder()
                      .setDefaultInstance(TestIamPermissionsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // One UnaryCallable per RPC, built in the constructor from the descriptors above.
  private final UnaryCallable<ConnectionProto.CreateConnectionRequest, ConnectionProto.Connection>
      createConnectionCallable;
  private final UnaryCallable<ConnectionProto.GetConnectionRequest, ConnectionProto.Connection>
      getConnectionCallable;
  private final UnaryCallable<
          ConnectionProto.ListConnectionsRequest, ConnectionProto.ListConnectionsResponse>
      listConnectionsCallable;
  private final UnaryCallable<ConnectionProto.UpdateConnectionRequest, ConnectionProto.Connection>
      updateConnectionCallable;
  private final UnaryCallable<ConnectionProto.UpdateConnectionCredentialRequest, Empty>
      updateConnectionCredentialCallable;
  private final UnaryCallable<ConnectionProto.DeleteConnectionRequest, Empty>
      deleteConnectionCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;
  // Aggregated resources to shut down with the stub, and the factory used to build callables.
  private final BackgroundResource backgroundResources;
  private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonConnectionServiceStub create(ConnectionServiceStubSettings settings)
throws IOException {
return new HttpJsonConnectionServiceStub(settings, ClientContext.create(settings));
}
public static final HttpJsonConnectionServiceStub create(ClientContext clientContext)
throws IOException {
return new HttpJsonConnectionServiceStub(
ConnectionServiceStubSettings.newHttpJsonBuilder().build(), clientContext);
}
public static final HttpJsonConnectionServiceStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonConnectionServiceStub(
ConnectionServiceStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}
  /**
   * Constructs an instance of HttpJsonConnectionServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonConnectionServiceStub(
      ConnectionServiceStubSettings settings, ClientContext clientContext) throws IOException {
    // Delegates to the full constructor with the default HTTP/JSON callable factory.
    this(settings, clientContext, new HttpJsonConnectionServiceCallableFactory());
  }
/**
 * Constructs an instance of HttpJsonConnectionServiceStub, using the given settings. This is
 * protected so that it is easy to make a subclass, but otherwise, the static factory methods
 * should be preferred.
 */
protected HttpJsonConnectionServiceStub(
    ConnectionServiceStubSettings settings,
    ClientContext clientContext,
    HttpJsonStubCallableFactory callableFactory)
    throws IOException {
  this.callableFactory = callableFactory;

  // Build per-method HTTP/JSON transport settings. Each one binds the method descriptor and
  // the shared proto type registry, plus a params extractor that copies the routing field of
  // the request ("parent", "name" or "resource") into the request params.
  HttpJsonCallSettings<ConnectionProto.CreateConnectionRequest, ConnectionProto.Connection>
      createConnectionTransportSettings =
          HttpJsonCallSettings
              .<ConnectionProto.CreateConnectionRequest, ConnectionProto.Connection>newBuilder()
              .setMethodDescriptor(createConnectionMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("parent", String.valueOf(request.getParent()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<ConnectionProto.GetConnectionRequest, ConnectionProto.Connection>
      getConnectionTransportSettings =
          HttpJsonCallSettings
              .<ConnectionProto.GetConnectionRequest, ConnectionProto.Connection>newBuilder()
              .setMethodDescriptor(getConnectionMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("name", String.valueOf(request.getName()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<
          ConnectionProto.ListConnectionsRequest, ConnectionProto.ListConnectionsResponse>
      listConnectionsTransportSettings =
          HttpJsonCallSettings
              .<ConnectionProto.ListConnectionsRequest, ConnectionProto.ListConnectionsResponse>
                  newBuilder()
              .setMethodDescriptor(listConnectionsMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("parent", String.valueOf(request.getParent()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<ConnectionProto.UpdateConnectionRequest, ConnectionProto.Connection>
      updateConnectionTransportSettings =
          HttpJsonCallSettings
              .<ConnectionProto.UpdateConnectionRequest, ConnectionProto.Connection>newBuilder()
              .setMethodDescriptor(updateConnectionMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("name", String.valueOf(request.getName()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<ConnectionProto.UpdateConnectionCredentialRequest, Empty>
      updateConnectionCredentialTransportSettings =
          HttpJsonCallSettings
              .<ConnectionProto.UpdateConnectionCredentialRequest, Empty>newBuilder()
              .setMethodDescriptor(updateConnectionCredentialMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("name", String.valueOf(request.getName()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<ConnectionProto.DeleteConnectionRequest, Empty>
      deleteConnectionTransportSettings =
          HttpJsonCallSettings.<ConnectionProto.DeleteConnectionRequest, Empty>newBuilder()
              .setMethodDescriptor(deleteConnectionMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("name", String.valueOf(request.getName()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
      HttpJsonCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
          .setMethodDescriptor(getIamPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("resource", String.valueOf(request.getResource()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
      HttpJsonCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
          .setMethodDescriptor(setIamPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("resource", String.valueOf(request.getResource()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsTransportSettings =
          HttpJsonCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setMethodDescriptor(testIamPermissionsMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("resource", String.valueOf(request.getResource()));
                    return builder.build();
                  })
              .build();

  // Pair each transport settings object with the matching call settings from the stub
  // settings to produce the callables exposed by this stub.
  this.createConnectionCallable =
      callableFactory.createUnaryCallable(
          createConnectionTransportSettings, settings.createConnectionSettings(), clientContext);
  this.getConnectionCallable =
      callableFactory.createUnaryCallable(
          getConnectionTransportSettings, settings.getConnectionSettings(), clientContext);
  this.listConnectionsCallable =
      callableFactory.createUnaryCallable(
          listConnectionsTransportSettings, settings.listConnectionsSettings(), clientContext);
  this.updateConnectionCallable =
      callableFactory.createUnaryCallable(
          updateConnectionTransportSettings, settings.updateConnectionSettings(), clientContext);
  this.updateConnectionCredentialCallable =
      callableFactory.createUnaryCallable(
          updateConnectionCredentialTransportSettings,
          settings.updateConnectionCredentialSettings(),
          clientContext);
  this.deleteConnectionCallable =
      callableFactory.createUnaryCallable(
          deleteConnectionTransportSettings, settings.deleteConnectionSettings(), clientContext);
  this.getIamPolicyCallable =
      callableFactory.createUnaryCallable(
          getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
  this.setIamPolicyCallable =
      callableFactory.createUnaryCallable(
          setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
  this.testIamPermissionsCallable =
      callableFactory.createUnaryCallable(
          testIamPermissionsTransportSettings,
          settings.testIamPermissionsSettings(),
          clientContext);

  // Aggregate the client context's background resources so that the lifecycle methods
  // (close/shutdown/awaitTermination) can release them together.
  this.backgroundResources =
      new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
/** Returns the HTTP/JSON method descriptors for every RPC exposed by this stub. */
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
  List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
  methodDescriptors.add(createConnectionMethodDescriptor);
  methodDescriptors.add(getConnectionMethodDescriptor);
  methodDescriptors.add(listConnectionsMethodDescriptor);
  methodDescriptors.add(updateConnectionMethodDescriptor);
  methodDescriptors.add(updateConnectionCredentialMethodDescriptor);
  methodDescriptors.add(deleteConnectionMethodDescriptor);
  methodDescriptors.add(getIamPolicyMethodDescriptor);
  methodDescriptors.add(setIamPolicyMethodDescriptor);
  methodDescriptors.add(testIamPermissionsMethodDescriptor);
  return methodDescriptors;
}
// Plain accessors for the per-RPC callables created in the constructor.
@Override
public UnaryCallable<ConnectionProto.CreateConnectionRequest, ConnectionProto.Connection>
    createConnectionCallable() {
  return createConnectionCallable;
}

@Override
public UnaryCallable<ConnectionProto.GetConnectionRequest, ConnectionProto.Connection>
    getConnectionCallable() {
  return getConnectionCallable;
}

@Override
public UnaryCallable<
        ConnectionProto.ListConnectionsRequest, ConnectionProto.ListConnectionsResponse>
    listConnectionsCallable() {
  return listConnectionsCallable;
}

@Override
public UnaryCallable<ConnectionProto.UpdateConnectionRequest, ConnectionProto.Connection>
    updateConnectionCallable() {
  return updateConnectionCallable;
}

@Override
public UnaryCallable<ConnectionProto.UpdateConnectionCredentialRequest, Empty>
    updateConnectionCredentialCallable() {
  return updateConnectionCredentialCallable;
}

@Override
public UnaryCallable<ConnectionProto.DeleteConnectionRequest, Empty> deleteConnectionCallable() {
  return deleteConnectionCallable;
}

@Override
public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
  return getIamPolicyCallable;
}

@Override
public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
  return setIamPolicyCallable;
}

@Override
public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
    testIamPermissionsCallable() {
  return testIamPermissionsCallable;
}
/** Closes all background resources; checked exceptions are wrapped in IllegalStateException. */
@Override
public final void close() {
  try {
    backgroundResources.close();
  } catch (RuntimeException e) {
    // Unchecked exceptions propagate unchanged.
    throw e;
  } catch (Exception e) {
    throw new IllegalStateException("Failed to close resource", e);
  }
}
// Lifecycle operations all delegate to the aggregated background resources.
@Override
public void shutdown() {
  backgroundResources.shutdown();
}

@Override
public boolean isShutdown() {
  return backgroundResources.isShutdown();
}

@Override
public boolean isTerminated() {
  return backgroundResources.isTerminated();
}

@Override
public void shutdownNow() {
  backgroundResources.shutdownNow();
}

@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
  return backgroundResources.awaitTermination(duration, unit);
}
}
|
apache/kafka | 36,283 | group-coordinator/src/test/java/org/apache/kafka/coordinator/group/assignor/SimpleAssignorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.coordinator.group.assignor;
import org.apache.kafka.common.Uuid;
import org.apache.kafka.coordinator.common.runtime.CoordinatorMetadataImage;
import org.apache.kafka.coordinator.common.runtime.KRaftCoordinatorMetadataImage;
import org.apache.kafka.coordinator.common.runtime.MetadataImageBuilder;
import org.apache.kafka.coordinator.group.api.assignor.GroupAssignment;
import org.apache.kafka.coordinator.group.api.assignor.GroupSpec;
import org.apache.kafka.coordinator.group.api.assignor.MemberAssignment;
import org.apache.kafka.coordinator.group.api.assignor.PartitionAssignorException;
import org.apache.kafka.coordinator.group.modern.Assignment;
import org.apache.kafka.coordinator.group.modern.GroupSpecImpl;
import org.apache.kafka.coordinator.group.modern.MemberSubscriptionAndAssignmentImpl;
import org.apache.kafka.coordinator.group.modern.SubscribedTopicDescriberImpl;
import org.apache.kafka.image.MetadataImage;
import org.apache.kafka.server.common.TopicIdPartition;

import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkAssignment;
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkTopicAssignment;
import static org.apache.kafka.coordinator.group.api.assignor.SubscriptionType.HETEROGENEOUS;
import static org.apache.kafka.coordinator.group.api.assignor.SubscriptionType.HOMOGENEOUS;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class SimpleAssignorTest {
// Fixed topic ids and names shared by the test cases below.
private static final Uuid TOPIC_1_UUID = Uuid.randomUuid();
private static final Uuid TOPIC_2_UUID = Uuid.randomUuid();
private static final Uuid TOPIC_3_UUID = Uuid.randomUuid();
private static final Uuid TOPIC_4_UUID = Uuid.randomUuid();
private static final String TOPIC_1_NAME = "topic1";
private static final String TOPIC_2_NAME = "topic2";
private static final String TOPIC_3_NAME = "topic3";
private static final String TOPIC_4_NAME = "topic4";
// Member ids used by the fixed-membership test cases.
private static final String MEMBER_A = "A";
private static final String MEMBER_B = "B";
private static final String MEMBER_C = "C";

// Assignor under test, shared by the tests that do not create their own instance.
private final SimpleAssignor assignor = new SimpleAssignor();
@Test
public void testName() {
    // The assignor must advertise itself under the name "simple".
    String actualName = assignor.name();
    assertEquals("simple", actualName);
}
@Test
public void testAssignWithEmptyMembers() {
    // With no members at all, the assignor must produce an empty assignment,
    // whatever the subscription type of the group.
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        CoordinatorMetadataImage.EMPTY
    );

    GroupAssignment homogeneousAssignment = assignor.assign(
        new GroupSpecImpl(Map.of(), HOMOGENEOUS, Map.of()),
        subscribedTopicMetadata
    );
    assertEquals(Map.of(), homogeneousAssignment.members());

    GroupAssignment heterogeneousAssignment = assignor.assign(
        new GroupSpecImpl(Map.of(), HETEROGENEOUS, Map.of()),
        subscribedTopicMetadata
    );
    assertEquals(Map.of(), heterogeneousAssignment.members());
}
@Test
public void testAssignWithNoSubscribedTopic() {
    // A member whose subscription set is empty gets nothing: the assignor returns
    // an empty assignment map rather than an entry with zero partitions.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    Map<String, MemberSubscriptionAndAssignmentImpl> members = Map.of(
        MEMBER_A,
        new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            Set.of(),
            Assignment.EMPTY
        )
    );

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of()
    );

    GroupAssignment groupAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    assertEquals(Map.of(), groupAssignment.members());
}
@Test
public void testAssignWithSubscribedToNonExistentTopic() {
    // MEMBER_A subscribes to TOPIC_2, which is absent from the metadata image,
    // so the assignor must reject the group spec.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    MemberSubscriptionAndAssignmentImpl memberA = new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_2_UUID),
        Assignment.EMPTY
    );
    GroupSpec groupSpec = new GroupSpecImpl(
        Map.of(MEMBER_A, memberA),
        HOMOGENEOUS,
        Map.of()
    );

    assertThrows(
        PartitionAssignorException.class,
        () -> assignor.assign(groupSpec, subscribedTopicMetadata)
    );
}
@Test
public void testAssignWithTwoMembersAndTwoTopicsHomogeneous() {
    // Two members share an identical subscription to topic1 (3 partitions) and
    // topic3 (2 partitions); all 5 partitions must end up assigned.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();

    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    // LinkedHashSet keeps the subscription iteration order equal to insertion order.
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    topicsSubscription.add(TOPIC_3_UUID);

    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));

    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    // T1: 3 partitions + T3: 2 partitions = 5 partitions.
    assertEveryPartitionGetsAssignment(5, computedAssignment);
}
@Test
public void testAssignWithTwoMembersAndTwoTopicsHomogeneousWithAllowedMap() {
    // Same as the homogeneous case above, but the group spec carries an explicit
    // allowed-partition map: all of topic1 and only partitions 0-1 of topic3,
    // leaving 5 assignable partitions in total.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 3)
        .build();

    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    topicsSubscription.add(TOPIC_3_UUID);

    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));

    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(
                TOPIC_1_UUID, Set.of(0, 1, 2),
                TOPIC_3_UUID, Set.of(0, 1) // but not 2
            )
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    // 3 allowed partitions of T1 + 2 allowed partitions of T3 = 5 partitions.
    assertEveryPartitionGetsAssignment(5, computedAssignment);
}
@Test
public void testAssignWithTwoMembersAndTwoTopicsHomogeneousWithNonAssignableTopic() {
    // The allowed-partition map only mentions topic1, so topic3 is entirely
    // non-assignable and only topic1's 3 partitions must be distributed.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();

    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    topicsSubscription.add(TOPIC_3_UUID);

    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));

    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(TOPIC_1_UUID, Set.of(0, 1, 2))
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    assertEveryPartitionGetsAssignment(3, computedAssignment);
}
@Test
public void testAssignWithThreeMembersThreeTopicsHeterogeneous() {
    // Heterogeneous group: A -> {T1, T2}, B -> {T3}, C -> {T2, T3}.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();

    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));

    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_3_UUID),
        Assignment.EMPTY
    ));

    Set<Uuid> memberCTopicsSubscription = new LinkedHashSet<>();
    memberCTopicsSubscription.add(TOPIC_2_UUID);
    memberCTopicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberCTopicsSubscription,
        Assignment.EMPTY
    ));

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    // T1: 3 partitions + T2: 3 partitions + T3: 2 partitions = 8 partitions
    assertEveryPartitionGetsAssignment(8, computedAssignment);
}
@Test
public void testAssignWithThreeMembersThreeTopicsHeterogeneousWithAllowedMap() {
    // Same heterogeneous topology (A -> {T1, T2}, B -> {T3}, C -> {T2, T3}),
    // but the allowed-partition map removes one partition from each topic.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();

    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));

    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_3_UUID),
        Assignment.EMPTY
    ));

    Set<Uuid> memberCTopicsSubscription = new LinkedHashSet<>();
    memberCTopicsSubscription.add(TOPIC_2_UUID);
    memberCTopicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberCTopicsSubscription,
        Assignment.EMPTY
    ));

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(
                TOPIC_1_UUID, Set.of(0, 1), // but not 2
                TOPIC_2_UUID, Set.of(0, 2), // but not 1
                TOPIC_3_UUID, Set.of(1) // but not 0
            )
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    // T1: 2 partitions + T2: 2 partitions + T3: 1 partition = 5 partitions
    assertEveryPartitionGetsAssignment(5, computedAssignment);
}
@Test
public void testAssignWithThreeMembersThreeTopicsHeterogeneousWithNonAssignableTopic() {
    // Topic3 is omitted from the allowed-partition map, which makes MEMBER_B
    // (subscribed only to T3) receive an empty assignment.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2) // non-assignable
        .build();

    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));

    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_3_UUID),
        Assignment.EMPTY
    ));

    Set<Uuid> memberCTopicsSubscription = new LinkedHashSet<>();
    memberCTopicsSubscription.add(TOPIC_2_UUID);
    memberCTopicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberCTopicsSubscription,
        Assignment.EMPTY
    ));

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(
                TOPIC_1_UUID, Set.of(0, 1, 2),
                TOPIC_2_UUID, Set.of(0, 1, 2)
            )
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    // The exact per-member expectation below pins the assignor's deterministic
    // placement for this topology.
    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment = new HashMap<>();
    expectedAssignment.put(MEMBER_A, mkAssignment(
        mkTopicAssignment(TOPIC_1_UUID, 0, 1, 2),
        mkTopicAssignment(TOPIC_2_UUID, 0, 2)
    ));
    expectedAssignment.put(MEMBER_B, Map.of());
    expectedAssignment.put(MEMBER_C, mkAssignment(
        mkTopicAssignment(TOPIC_2_UUID, 1)
    ));

    // T1: 3 partitions + T2: 3 partitions + T3: 2 partitions(non-assignable) = 6 partitions
    assertEveryPartitionGetsAssignment(6, computedAssignment);
    assertAssignment(expectedAssignment, computedAssignment);
}
@Test
public void testAssignWithOneMemberNoAssignedTopicHeterogeneous() {
    // MEMBER_B has an empty subscription, so MEMBER_A must receive every
    // partition of both topics while MEMBER_B gets an empty assignment.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 2)
        .build();

    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));

    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(),
        Assignment.EMPTY
    ));

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );

    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment = new HashMap<>();
    expectedAssignment.put(MEMBER_A, mkAssignment(
        mkTopicAssignment(TOPIC_1_UUID, 0, 1, 2),
        mkTopicAssignment(TOPIC_2_UUID, 0, 1)));
    expectedAssignment.put(MEMBER_B, mkAssignment());

    // T1: 3 partitions + T2: 2 partitions = 5 partitions
    assertEveryPartitionGetsAssignment(5, computedAssignment);
    assertAssignment(expectedAssignment, computedAssignment);
}
/**
 * Adds members one at a time to a homogeneous group, re-running the assignor after each
 * addition with the previously computed assignment fed back in as the current assignment,
 * and checks that every partition remains assigned throughout.
 */
@Test
public void testIncrementalAssignmentIncreasingMembersHomogeneous() {
    final int numPartitions = 24;
    final int numMembers = 101;

    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);

    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    SimpleAssignor assignor = new SimpleAssignor();

    // Increase the number of members one at a time, checking that the partitions are assigned as expected.
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        // The new member joins with an empty current assignment.
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            Assignment.EMPTY
        ));

        GroupSpec groupSpec = new GroupSpecImpl(
            members,
            HOMOGENEOUS,
            Map.of() // never mutated; consistent with the other tests in this file
        );

        GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
        assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);

        // Feed the computed assignment back in as the current assignment for the next round.
        computedAssignment.members().forEach((memberId, partitions) -> members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            new Assignment(partitions.partitions())
        )));
    }
}
/**
 * Builds a homogeneous group with 101 members, seeds each member's current assignment from
 * an initial computation, then removes members one at a time, re-running the assignor after
 * each removal and checking that every partition remains assigned throughout.
 */
@Test
public void testIncrementalAssignmentDecreasingMembersHomogeneous() {
    final int numPartitions = 24;
    final int numMembers = 101;

    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );

    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);

    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    SimpleAssignor assignor = new SimpleAssignor();
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            Assignment.EMPTY
        ));
    }

    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of() // never mutated; consistent with the other tests in this file
    );
    GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
    assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);

    // Seed each member's current assignment from the initial computation.
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            new Assignment(computedAssignment.members().get(newMemberId).partitions()))
        );
    }

    // Decrease the number of members one at a time, checking that the partitions are assigned as expected.
    for (int member = numMembers - 1; member > 0; member--) {
        String newMemberId = "M" + member;
        members.remove(newMemberId);
        groupSpec = new GroupSpecImpl(
            members,
            HOMOGENEOUS,
            Map.of()
        );
        computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
        assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
        computedAssignment.members().forEach((memberId, partitions) -> members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            new Assignment(partitions.partitions())
        )));
    }
}
@Test
public void testAssignWithCurrentAssignmentHeterogeneous() {
    // Current assignment setup - 3 members A - {T1, T2}, B - {T3}, C - {T2, T3}.
    MetadataImage metadataImage1 = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();

    Set<Uuid> memberATopicsSubscription1 = new LinkedHashSet<>();
    memberATopicsSubscription1.add(TOPIC_1_UUID);
    memberATopicsSubscription1.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members1 = new HashMap<>();
    members1.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription1,
        Assignment.EMPTY
    ));

    members1.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_3_UUID),
        Assignment.EMPTY
    ));

    Set<Uuid> memberCTopicsSubscription1 = new LinkedHashSet<>();
    memberCTopicsSubscription1.add(TOPIC_2_UUID);
    memberCTopicsSubscription1.add(TOPIC_3_UUID);
    members1.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberCTopicsSubscription1,
        Assignment.EMPTY
    ));

    GroupSpec groupSpec1 = new GroupSpecImpl(
        members1,
        HETEROGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata1 = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage1)
    );

    GroupAssignment computedAssignment1 = assignor.assign(
        groupSpec1,
        subscribedTopicMetadata1
    );

    // T1: 3 + T2: 3 + T3: 2 = 8 partitions in the first topology.
    assertEveryPartitionGetsAssignment(8, computedAssignment1);

    // New assignment setup - 2 members A - {T1, T2, T3}, B - {T3, T4}.
    // Member C has left, topic4 has appeared, and A and B carry over a current
    // assignment from the first round.
    MetadataImage metadataImage2 = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .addTopic(TOPIC_4_UUID, TOPIC_4_NAME, 1)
        .build();

    Map<String, MemberSubscriptionAndAssignmentImpl> members2 = new HashMap<>();
    Set<Uuid> memberATopicsSubscription2 = new LinkedHashSet<>();
    memberATopicsSubscription2.add(TOPIC_1_UUID);
    memberATopicsSubscription2.add(TOPIC_2_UUID);
    memberATopicsSubscription2.add(TOPIC_3_UUID);

    Set<Uuid> memberBTopicsSubscription2 = new LinkedHashSet<>();
    memberBTopicsSubscription2.add(TOPIC_3_UUID);
    memberBTopicsSubscription2.add(TOPIC_4_UUID);

    members2.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription2,
        new Assignment(mkAssignment(
            mkTopicAssignment(TOPIC_1_UUID, 0, 1, 2),
            mkTopicAssignment(TOPIC_2_UUID, 0, 2)))
    ));

    members2.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberBTopicsSubscription2,
        new Assignment(mkAssignment(
            mkTopicAssignment(TOPIC_3_UUID, 0, 1)))
    ));

    GroupSpec groupSpec2 = new GroupSpecImpl(
        members2,
        HETEROGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata2 = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage2)
    );

    GroupAssignment computedAssignment2 = assignor.assign(
        groupSpec2,
        subscribedTopicMetadata2
    );

    // T1: 3 + T2: 3 + T3: 2 + T4: 1 = 9 partitions in the second topology.
    assertEveryPartitionGetsAssignment(9, computedAssignment2);
}
/**
 * Adds members one at a time to a heterogeneous group cycling through four distinct
 * subscription patterns, re-running the assignor after each addition with the previous
 * assignment carried over, and checks that every partition remains assigned throughout.
 */
@Test
public void testIncrementalAssignmentIncreasingMembersHeterogeneous() {
    final int numPartitions = 24;
    final int numMembers = 101;

    CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions / 2)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, numPartitions / 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, numPartitions / 6)
        .buildCoordinatorMetadataImage();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        metadataImage
    );

    // Four subscription patterns; members cycle through them round-robin.
    // (Presized to 4 — the original presized to 3 but adds four entries.)
    List<Set<Uuid>> topicsSubscriptions = new ArrayList<>(4);
    Set<Uuid> topicsSubscription1 = new LinkedHashSet<>();
    topicsSubscription1.add(TOPIC_1_UUID);
    topicsSubscription1.add(TOPIC_2_UUID);
    topicsSubscription1.add(TOPIC_3_UUID);
    topicsSubscriptions.add(topicsSubscription1);

    Set<Uuid> topicsSubscription2 = new LinkedHashSet<>();
    topicsSubscription2.add(TOPIC_2_UUID);
    topicsSubscriptions.add(topicsSubscription2);

    Set<Uuid> topicsSubscription3 = new LinkedHashSet<>();
    topicsSubscription3.add(TOPIC_3_UUID);
    topicsSubscriptions.add(topicsSubscription3);

    Set<Uuid> topicsSubscription4 = new LinkedHashSet<>();
    topicsSubscription4.add(TOPIC_1_UUID);
    topicsSubscription4.add(TOPIC_2_UUID);
    topicsSubscriptions.add(topicsSubscription4);

    // Derive the modulus from the list instead of hard-coding 4.
    final int numTopicsSubscriptions = topicsSubscriptions.size();

    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    SimpleAssignor assignor = new SimpleAssignor();

    // Increase the number of members one at a time, checking that the partitions are assigned as expected.
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscriptions.get(member % numTopicsSubscriptions),
            Assignment.EMPTY
        ));

        GroupSpec groupSpec = new GroupSpecImpl(
            members,
            HETEROGENEOUS,
            Map.of() // never mutated; consistent with the other tests in this file
        );

        GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
        assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);

        // Carry the computed assignment over as the current assignment of the existing members.
        for (int m = 0; m < member; m++) {
            String memberId = "M" + m;
            members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
                Optional.empty(),
                Optional.empty(),
                topicsSubscriptions.get(m % numTopicsSubscriptions),
                new Assignment(computedAssignment.members().get(memberId).partitions())
            ));
        }
    }
}
/**
 * Verifies that the assignor keeps every partition assigned while the group
 * shrinks one member at a time, starting from a fully-populated group whose
 * members cycle through four different (heterogeneous) topic subscriptions.
 */
@Test
public void testIncrementalAssignmentDecreasingMembersHeterogeneous() {
    final int numPartitions = 24;
    final int numMembers = 101;
    // 24 partitions total: 12 + 8 + 4 across the three topics.
    CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions / 2)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, numPartitions / 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, numPartitions / 6)
        .buildCoordinatorMetadataImage();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        metadataImage
    );
    // Four distinct subscription sets; member i subscribes with set (i % 4).
    ArrayList<Set<Uuid>> topicsSubscriptions = new ArrayList<>(3);
    Set<Uuid> topicsSubscription1 = new LinkedHashSet<>();
    topicsSubscription1.add(TOPIC_1_UUID);
    topicsSubscription1.add(TOPIC_2_UUID);
    topicsSubscription1.add(TOPIC_3_UUID);
    topicsSubscriptions.add(topicsSubscription1);
    Set<Uuid> topicsSubscription2 = new LinkedHashSet<>();
    topicsSubscription2.add(TOPIC_2_UUID);
    topicsSubscriptions.add(topicsSubscription2);
    Set<Uuid> topicsSubscription3 = new LinkedHashSet<>();
    topicsSubscription3.add(TOPIC_3_UUID);
    topicsSubscriptions.add(topicsSubscription3);
    Set<Uuid> topicsSubscription4 = new LinkedHashSet<>();
    topicsSubscription4.add(TOPIC_1_UUID);
    topicsSubscription4.add(TOPIC_2_UUID);
    topicsSubscriptions.add(topicsSubscription4);
    int numTopicsSubscriptions = 4;
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    SimpleAssignor assignor = new SimpleAssignor();
    // Start with the full membership, all with empty prior assignments.
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscriptions.get(member % numTopicsSubscriptions),
            Assignment.EMPTY
        ));
    }
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        new HashMap<>()
    );
    GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
    assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
    // Feed the initial assignment back so subsequent rounds are incremental.
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscriptions.get(member % numTopicsSubscriptions),
            new Assignment(computedAssignment.members().get(newMemberId).partitions()))
        );
    }
    // Decrease the number of members one at a time, checking that the partitions are assigned as expected
    for (int member = numMembers - 1; member > 0; member--) {
        String newMemberId = "M" + member;
        members.remove(newMemberId);
        groupSpec = new GroupSpecImpl(
            members,
            HETEROGENEOUS,
            new HashMap<>()
        );
        computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
        assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
        // Refresh the remaining members with the assignment just computed.
        for (int m = 0; m < member; m++) {
            String memberId = "M" + m;
            members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
                Optional.empty(),
                Optional.empty(),
                topicsSubscriptions.get(m % numTopicsSubscriptions),
                new Assignment(computedAssignment.members().get(memberId).partitions())
            ));
        }
    }
}
/**
 * Asserts that the computed group assignment matches {@code expectedAssignment}
 * exactly: the same number of members, and an identical topic-to-partitions
 * map for every member in the computed assignment.
 */
private void assertAssignment(
    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment,
    GroupAssignment computedGroupAssignment
) {
    Map<String, MemberAssignment> computedMembers = computedGroupAssignment.members();
    assertEquals(expectedAssignment.size(), computedMembers.size());
    for (Map.Entry<String, MemberAssignment> entry : computedMembers.entrySet()) {
        assertEquals(expectedAssignment.get(entry.getKey()), entry.getValue().partitions());
    }
}
/**
 * Asserts that, across all members, the union of assigned topic-partitions
 * contains exactly {@code expectedPartitions} distinct entries, i.e. every
 * partition is assigned to at least one member.
 */
private void assertEveryPartitionGetsAssignment(
    int expectedPartitions,
    GroupAssignment computedGroupAssignment
) {
    Set<TopicIdPartition> assigned = new HashSet<>();
    for (MemberAssignment memberAssignment : computedGroupAssignment.members().values()) {
        for (Map.Entry<Uuid, Set<Integer>> entry : memberAssignment.partitions().entrySet()) {
            for (Integer partition : entry.getValue()) {
                assigned.add(new TopicIdPartition(entry.getKey(), partition));
            }
        }
    }
    assertEquals(expectedPartitions, assigned.size());
}
}
|
oracle/coherence | 35,995 | prj/coherence-core/src/main/java/com/tangosol/dev/assembler/Method.java | /*
* Copyright (c) 2000, 2020, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* http://oss.oracle.com/licenses/upl.
*/
package com.tangosol.dev.assembler;
import com.tangosol.util.NullImplementation;
import com.tangosol.util.StringTable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;
/**
* Represents a Java Virtual Machine Method structure as defined by the Java
* Virtual Machine (JVM) Specification.
*
* @version 0.50, 05/14/98, assembler/dis-assembler
* @author Cameron Purdy
*/
public class Method extends VMStructure implements Constants
{
// ----- construction ---------------------------------------------------
/**
* Construct a method structure. Used by ClassFile disassembly.
*
* @param sClass the class name containing this method
* @param fInterface whether this method is defined on an interface
*/
protected Method(String sClass, boolean fInterface)
{
m_sClass = sClass;
f_fInterface = fInterface;
}
/**
* Construct a method structure.
*
* @param sName the method name
* @param sSig the method signature
* @param fInterface whether this method is defined on an interface
*/
protected Method(String sName, String sSig, boolean fInterface)
{
this(new UtfConstant(sName), new UtfConstant(sSig.replace('.','/')), fInterface);
}
/**
* Construct a method which references the passed UTF constants.
*
* @param constantName the referenced UTF constant which contains the
* name of the method
* @param constantSig the referenced UTF constant which contains the
* method signature
* @param fInterface whether this method is defined on an interface
*/
protected Method(UtfConstant constantName, UtfConstant constantSig, boolean fInterface)
{
if (constantName == null || constantSig == null)
{
throw new IllegalArgumentException(CLASS + ": Values cannot be null!");
}
m_utfName = constantName;
m_utfSig = constantSig;
f_fInterface = fInterface;
}
// ----- VMStructure operations -----------------------------------------
/**
 * The disassembly process reads the structure from the passed input
 * stream and uses the constant pool to dereference any constant
 * references.
 *
 * @param stream the stream implementing java.io.DataInput from which
 *               to read the assembled VM structure
 * @param pool   the constant pool for the class which contains any
 *               constants referenced by this VM structure
 *
 * @throws IOException if the stream cannot be read
 */
protected void disassemble(DataInput stream, ConstantPool pool)
        throws IOException
    {
    // access flags
    m_flags.disassemble(stream, pool);

    // name and signature, stored as u2 indexes into the constant pool
    m_utfName = (UtfConstant) pool.getConstant(stream.readUnsignedShort());
    m_utfSig  = (UtfConstant) pool.getConstant(stream.readUnsignedShort());

    // attributes: u2 count followed by that many attribute structures,
    // keyed by identity so a duplicate replaces rather than accumulates
    m_tblAttribute.clear();
    int cAttr = stream.readUnsignedShort();
    for (int i = 0; i < cAttr; ++i)
        {
        Attribute attr = Attribute.loadAttribute(this, stream, pool);
        m_tblAttribute.put(attr.getIdentity(), attr);
        }
    }
/**
 * The pre-assembly step collects the necessary entries for the constant
 * pool.  During this step, all constants used by this VM structure and
 * any sub-structures are registered with (but not yet bound by position
 * in) the constant pool.
 *
 * @param pool the constant pool for the class which needs to be
 *             populated with the constants required to build this
 *             VM structure
 */
protected void preassemble(ConstantPool pool)
    {
    pool.registerConstant(m_utfName);
    pool.registerConstant(m_utfSig );

    m_flags.preassemble(pool);

    Enumeration enmr = m_tblAttribute.elements();
    while (enmr.hasMoreElements())
        {
        Attribute attr = (Attribute) enmr.nextElement();
        try
            {
            attr.preassemble(pool);
            }
        catch (Throwable e)
            {
            // dump the code attribute to aid diagnosis before rethrowing
            if (attr.getName().equals(ATTR_CODE))
                {
                out("Code pre-assembly error in: " + toString());
                ((CodeAttribute) attr).print();
                }

            // preassemble declares no checked exceptions, so anything
            // caught here must be a RuntimeException or an Error
            if (e instanceof RuntimeException)
                {
                throw (RuntimeException) e;
                }
            else
                {
                throw (Error) e;
                }
            }
        }
    }
/**
 * The assembly process assembles and writes the structure to the passed
 * output stream, resolving any dependencies using the passed constant
 * pool.
 *
 * @param stream the stream implementing java.io.DataOutput to which to
 *               write the assembled VM structure
 * @param pool   the constant pool for the class which by this point
 *               contains the entire set of constants required to build
 *               this VM structure
 *
 * @throws IOException if the stream cannot be written to
 */
protected void assemble(DataOutput stream, ConstantPool pool)
        throws IOException
    {
    // access flags, then u2 constant-pool indexes of name and signature
    m_flags.assemble(stream, pool);
    stream.writeShort(pool.findConstant(m_utfName));
    stream.writeShort(pool.findConstant(m_utfSig ));

    // u2 attribute count followed by each assembled attribute
    stream.writeShort(m_tblAttribute.getSize());
    Enumeration enmr = m_tblAttribute.elements();
    while (enmr.hasMoreElements())
        {
        Attribute attr = (Attribute) enmr.nextElement();
        try
            {
            attr.assemble(stream, pool);
            }
        catch (Throwable e)
            {
            // dump the code attribute to aid diagnosis before rethrowing
            if (attr.getName().equals(ATTR_CODE))
                {
                out("Code assembly error in: " + toString());
                ((CodeAttribute) attr).print();
                }

            // rethrow without wrapping; note that IOException is handled
            // explicitly (attr.assemble declares it) -- previously it would
            // have failed the (Error) cast with an unrelated
            // ClassCastException, losing the original cause
            if (e instanceof IOException)
                {
                throw (IOException) e;
                }
            else if (e instanceof RuntimeException)
                {
                throw (RuntimeException) e;
                }
            else
                {
                throw (Error) e;
                }
            }
        }
    }
/**
 * Determine the identity of the VM structure (if applicable).  For a
 * method the identity is the concatenation of name and signature, which
 * uniquely identifies the method within its class.
 *
 * @return the string identity of the VM structure
 */
public String getIdentity()
    {
    return m_utfName.getValue().concat(m_utfSig.getValue());
    }
/**
 * Determine if the VM structure (or any contained VM structure) has been
 * modified.  Checks the method's own flag, the access flags, and every
 * contained attribute.
 *
 * @return true if the VM structure has been modified
 */
public boolean isModified()
    {
    if (m_fModified || m_flags.isModified())
        {
        return true;
        }

    // modified if any contained attribute has been modified
    for (Enumeration enmr = m_tblAttribute.elements(); enmr.hasMoreElements(); )
        {
        if (((Attribute) enmr.nextElement()).isModified())
            {
            return true;
            }
        }

    return false;
    }
/**
 * Reset the modified state of the VM structure, including the access
 * flags and all contained attributes.
 */
protected void resetModified()
    {
    m_flags.resetModified();

    for (Enumeration enmr = m_tblAttribute.elements(); enmr.hasMoreElements(); )
        {
        ((Attribute) enmr.nextElement()).resetModified();
        }

    m_fModified = false;
    }
// ----- Comparable operations ------------------------------------------
/**
 * Compares this Object with the specified Object for order.  Methods are
 * ordered primarily by name and secondarily by signature.
 *
 * @param obj the <code>Object</code> to be compared.
 *
 * @return a negative integer, zero, or a positive integer as this Object
 *         is less than, equal to, or greater than the given Object.
 *
 * @exception ClassCastException the specified Object's type prevents it
 *            from being compared to this Object.
 */
public int compareTo(Object obj)
    {
    Method that = (Method) obj;

    int nResult = m_utfName.compareTo(that.m_utfName);
    return nResult != 0 ? nResult : m_utfSig.compareTo(that.m_utfSig);
    }
// ----- Object operations ----------------------------------------------
/**
 * Produce a human-readable string describing the method in source-like
 * form, e.g. "public static void main(String[])".
 *
 * @return a string describing the method
 */
public String toString()
    {
    String   sMods  = m_flags.toString(ACC_METHOD);
    String   sName  = m_utfName.getValue();
    String[] asType = toTypeStrings(m_utfSig.getValue());
    int      cType  = asType.length;

    // an unsynchronized StringBuilder is sufficient for a local buffer
    StringBuilder sb = new StringBuilder();
    if (sMods.length() > 0)
        {
        sb.append(sMods)
          .append(' ');
        }

    // return type, name, then the comma-separated parameter list
    sb.append(asType[0])
      .append(' ')
      .append(sName)
      .append('(');

    for (int i = 1; i < cType; ++i)
        {
        if (i > 1)
            {
            sb.append(", ");
            }
        sb.append(asType[i]);
        }
    sb.append(')');

    return sb.toString();
    }
/**
 * Compare this object to another object for equality.
 * <p>
 * Two Method structures are equal iff they are of the exact same class
 * and have equal names, signatures, access flags and attribute tables.
 *
 * @param obj the other object to compare to this
 *
 * @return true if this object equals that object
 */
public boolean equals(Object obj)
    {
    if (this == obj)
        {
        return true;
        }

    // explicit null / class checks instead of the previous
    // exception-driven control flow (catching NPE and CCE)
    if (obj == null || this.getClass() != obj.getClass())
        {
        return false;
        }

    Method that = (Method) obj;
    return this.m_utfName     .equals(that.m_utfName     )
        && this.m_utfSig      .equals(that.m_utfSig      )
        && this.m_flags       .equals(that.m_flags       )
        && this.m_tblAttribute.equals(that.m_tblAttribute);
    }

/**
 * Compute a hash code consistent with {@link #equals}, based on the
 * method's name and signature.  (Equal Methods necessarily have equal
 * names and signatures; the mutable flags and attributes are excluded
 * so the hash is stable across modification.)
 *
 * @return a hash code for this Method
 */
public int hashCode()
    {
    return m_utfName.hashCode() ^ m_utfSig.hashCode();
    }
// ----- Method operations ----------------------------------------------
/**
 * Add a description (name and access flags) to a parameter at index
 * {@code iParam} in the method signature.
 * <p>
 * Access flags is restricted to the following bit values:
 * <ol>
 *   <li>{@link MethodParametersAttribute.MethodParameter#ACC_FINAL final}</li>
 *   <li>{@link MethodParametersAttribute.MethodParameter#ACC_SYNTHETIC synthetic}</li>
 *   <li>{@link MethodParametersAttribute.MethodParameter#ACC_MANDATED mandated}</li>
 * </ol>
 *
 * @param iParam an index to the parameter in the method signature
 * @param sName  the name of the parameter
 * @param nFlags the access flags for the parameter; a bit-mask of the
 *               final, synthetic and mandated flags listed above
 *
 * @return whether the parameter description was added
 */
public boolean addParameter(int iParam, String sName, int nFlags)
    {
    // getTypes()[0] is the return type, so the parameter count is one less
    int cParams = getTypes().length - 1;

    // short-circuit avoids creating the attribute for an out-of-range index
    return iParam <= cParams
        && ensureMethodParameters(cParams).addParameter(iParam, sName, nFlags);
    }
/**
 * Parse the method signature into discrete return type and parameter
 * signatures as they would appear in Java source.
 *
 * @param sSig the JVM method signature
 *
 * @return an array of Java type strings, where [0] is the return
 *         type and [1]..[c] are the parameter types.
 */
public static String[] toTypeStrings(String sSig)
    {
    // split into individual JVM signatures, then convert each in place
    String[] asType = toTypes(sSig);
    for (int i = 0, c = asType.length; i < c; ++i)
        {
        asType[i] = Field.toTypeString(asType[i]);
        }
    return asType;
    }
/**
 * Parse the method signature into discrete return type and parameter
 * signatures as they appear in Java .class structures.
 *
 * @param sSig the JVM method signature
 *
 * @return an array of JVM type signatures, where [0] is the return
 *         type and [1]..[c] are the parameter types.
 *
 * @throws IllegalArgumentException if the signature does not start
 *         with '('
 */
public static String[] toTypes(String sSig)
    {
    // check for start of signature
    char[] ach = sSig.toCharArray();
    if (ach[0] != '(')
        {
        throw new IllegalArgumentException("JVM Method Signature must start with '('");
        }

    // collect the types; element [0] is reserved for the return value
    // (ArrayList replaces the previous raw, synchronized Vector)
    List<String> listType = new ArrayList<String>();
    listType.add(null);

    // parse parameter signatures up to the ')' separator
    int of = 1;
    while (ach[of] != ')')
        {
        int cch = getTypeLength(ach, of);
        listType.add(new String(ach, of, cch));
        of += cch;
        }

    // return value starts after the parameter-stop character
    // and runs to the end of the method signature
    ++of;
    listType.set(0, new String(ach, of, ach.length - of));

    return listType.toArray(new String[0]);
    }
/**
 * Determine the length (in characters) of the single JVM type signature
 * that starts at offset <tt>of</tt> within the passed character array.
 *
 * @param ach the characters of the enclosing method signature
 * @param of  the offset at which the type signature starts
 *
 * @return the number of characters making up the type signature
 *
 * @throws IllegalArgumentException if the character at <tt>of</tt> is
 *         not a recognized JVM type signature prefix
 */
private static int getTypeLength(char[] ach, int of)
    {
    switch (ach[of])
        {
        // primitive types (and void) are exactly one character
        case 'V':
        case 'Z':
        case 'B':
        case 'C':
        case 'S':
        case 'I':
        case 'J':
        case 'F':
        case 'D':
            return 1;

        case '[':
            {
            // array: '[' optionally followed by digits, then the element type
            // NOTE(review): standard JVM descriptors carry no digits after
            // '[' -- presumably legacy/extended support via isDecimal; confirm
            int cch = 1;
            while (isDecimal(ach[++of]))
                {
                ++cch;
                }
            return cch + getTypeLength(ach, of);
            }

        case 'L':
            {
            // class type: 'L' + internal class name + ';'
            int cch = 2;
            while (ach[++of] != ';')
                {
                ++cch;
                }
            return cch;
            }

        default:
            throw new IllegalArgumentException("JVM Type Signature cannot start with '" + ach[of] + "'");
        }
    }
// ----- accessors: name and type --------------------------------------
/**
* Get the name of the method as a string.
*
* @return the method name
*/
public String getName()
{
return m_utfName.getValue();
}
/**
* Get the signature of the method as a string. (This is not called
* "getSignature" because that could imply a "SignatureConstant", which
* is itself both name and type.)
*
* @return the method signature
*/
public String getType()
{
return m_utfSig.getValue();
}
/**
* Get the types of the method parameters and return value as they would
* appear in JVM .class structures.
*
* @return an array of JVM type signatures, [0] for the return value type
* and [1..n] for the parameter types
*/
public String[] getTypes()
{
return toTypes(m_utfSig.getValue());
}
/**
* Get the types of the method parameters and return value as they would
* appear in Java source.
*
* @return an array of strings, [0] for the return value type and [1..n]
* for the parameter types
*/
public String[] getTypeStrings()
{
return toTypeStrings(m_utfSig.getValue());
}
/**
 * Get the names of the method parameters as they appear in source code.
 * This only works for code which has been assembled or disassembled,
 * not for code which has been created but not yet assembled, because it
 * relies on the LocalVariableTable debug attribute of the Code attribute.
 *
 * @return an array of parameter names, [0] for the return value and
 *         [1..n] for the parameters; the return value name is always
 *         null and the other names are null if debugging information
 *         is not available
 */
public String[] getNames()
    {
    String[] as = getTypes();
    int      c  = as.length;

    CodeAttribute code = (CodeAttribute) m_tblAttribute.get(ATTR_CODE);
    if (code != null)
        {
        LocalVariableTableAttribute vars =
                (LocalVariableTableAttribute) code.getAttribute(ATTR_VARIABLES);
        if (vars != null)
            {
            // slot 0 is "this" for instance methods, so parameters start
            // at slot 1 unless the method is static
            int cwBase   = isStatic() ? 0 : 1;

            // first pass: count the local-variable slots occupied by the
            // parameters (longs and doubles occupy two slots each)
            int cwParams = cwBase;
            for (int i = 1; i < c; ++i)
                {
                switch (as[i].charAt(0))
                    {
                    default:
                        cwParams += 1;
                        break;
                    case 'D':
                    case 'J':
                        cwParams += 2;
                        break;
                    }
                }

            // second pass: build a map from slot number to 1-based
            // parameter index (the second slot of a wide parameter stays 0)
            int[] aiSlotToParam = new int[cwParams];
            cwParams = cwBase;
            for (int i = 1; i < c; ++i)
                {
                // the slot number (cwParams) corresponds to the 1-based
                // parameter number (i)
                aiSlotToParam[cwParams] = i;
                switch (as[i].charAt(0))
                    {
                    default:
                        cwParams += 1;
                        break;
                    case 'D':
                    case 'J':
                        cwParams += 2;
                        break;
                    }

                // clear param type; the array is re-used to hold the names
                as[i] = null;
                }

            // clear return type (the return value never has a name)
            as[0] = null;

            // use the debug ranges to name any slot holding a parameter;
            // the first range seen for a given slot wins
            for (Enumeration enmr = vars.ranges(); enmr.hasMoreElements(); )
                {
                AbstractLocalVariableTableAttribute.Range range =
                        (AbstractLocalVariableTableAttribute.Range) enmr.nextElement();
                if (range.getSlot() < cwParams)
                    {
                    int i = aiSlotToParam[range.getSlot()];
                    if (i > 0 && as[i] == null)
                        {
                        as[i] = range.getVariableName();
                        }
                    }
                }

            return as;
            }
        }

    // no debug information available: every name is unknown
    for (int i = 0; i < c; ++i)
        {
        as[i] = null;
        }
    return as;
    }
/**
* Get the UTF constant which holds the method name.
*
* @return the UTF constant which contains the name
*/
public UtfConstant getNameConstant()
{
return m_utfName;
}
/**
* Get the UTF constant which holds the method signature.
*
* @return the UTF constant which contains the signature
*/
public UtfConstant getTypeConstant()
{
return m_utfSig;
}
// ----- accessor: access ----------------------------------------------
/**
* Get the method accessibility value.
*
* @return one of ACC_PUBLIC, ACC_PROTECTED, ACC_PRIVATE, or ACC_PACKAGE
*/
public int getAccess()
{
return m_flags.getAccess();
}
/**
* Set the method accessibility value.
*
* @param nAccess should be one of ACC_PUBLIC, ACC_PROTECTED,
* ACC_PRIVATE, or ACC_PACKAGE
*/
public void setAccess(int nAccess)
{
m_flags.setAccess(nAccess);
}
/**
* Determine if the accessibility is public.
*
* @return true if the accessibility is public
*/
public boolean isPublic()
{
return m_flags.isPublic();
}
/**
* Set the accessibility to public.
*/
public void setPublic()
{
m_flags.setPublic();
}
/**
* Determine if the accessibility is protected.
*
* @return true if the accessibility is protected
*/
public boolean isProtected()
{
return m_flags.isProtected();
}
/**
* Set the accessibility to protected.
*/
public void setProtected()
{
m_flags.setProtected();
}
/**
* Determine if the accessibility is package private.
*
* @return true if the accessibility is package private
*/
public boolean isPackage()
{
return m_flags.isPackage();
}
/**
* Set the accessibility to package private.
*/
public void setPackage()
{
m_flags.setPackage();
}
/**
* Determine if the accessibility is private.
*
* @return true if the accessibility is private
*/
public boolean isPrivate()
{
return m_flags.isPrivate();
}
/**
* Set the accessibility to private.
*/
public void setPrivate()
{
m_flags.setPrivate();
}
// ----- accessor: static -------------------------------------------
/**
* Determine if the Static attribute is set.
*
* @return true if Static
*/
public boolean isStatic()
{
return m_flags.isStatic();
}
/**
* Set the Static attribute.
*
* @param fStatic true to set to Static, false otherwise
*/
public void setStatic(boolean fStatic)
{
m_flags.setStatic(fStatic);
}
// ----- accessor: final -------------------------------------------
/**
* Determine if the Final attribute is set.
*
* @return true if Final
*/
public boolean isFinal()
{
return m_flags.isFinal();
}
/**
* Set the Final attribute.
*
* @param fFinal true to set to Final, false otherwise
*/
public void setFinal(boolean fFinal)
{
m_flags.setFinal(fFinal);
}
// ----- accessor: synchronized -------------------------------------------
/**
* Determine if the synchronized attribute is set.
*
* @return true if synchronized
*/
public boolean isSynchronized()
{
return m_flags.isSynchronized();
}
/**
* Set the synchronized attribute.
*
* @param fSynchronized true to set to synchronized, false otherwise
*/
public void setSynchronized(boolean fSynchronized)
{
m_flags.setSynchronized(fSynchronized);
}
// ----- accessor: native -------------------------------------------
/**
* Determine if the native attribute is set.
*
* @return true if native
*/
public boolean isNative()
{
return m_flags.isNative();
}
/**
* Set the native attribute.
*
* @param fNative true to set to native, false otherwise
*/
public void setNative(boolean fNative)
{
m_flags.setNative(fNative);
}
// ----- accessor: abstract -------------------------------------------
/**
* Determine if the abstract attribute is set.
*
* @return true if abstract
*/
public boolean isAbstract()
{
return m_flags.isAbstract();
}
/**
* Set the abstract attribute.
*
* @param fAbstract true to set to abstract, false otherwise
*/
public void setAbstract(boolean fAbstract)
{
m_flags.setAbstract(fAbstract);
}
// ----- accessor: bridge ----------------------------------------------
/**
* Determine if the bridge attribute is set.
*
* @return true if bridge
*/
public boolean isBridge()
{
return m_flags.isBridge();
}
/**
* Set the bridge attribute.
*
* @param fBridge true to set to bridge, false otherwise
*/
public void setBridge(boolean fBridge)
{
m_flags.setBridge(fBridge);
}
// ----- accessor: varargs ---------------------------------------------
/**
* Determine if the varargs attribute is set.
*
* @return true if varargs
*/
public boolean isVarArgs()
{
return m_flags.isVarArgs();
}
/**
* Set the varargs attribute.
*
* @param fVarArgs true to set to varargs, false otherwise
*/
public void setVarArgs(boolean fVarArgs)
{
m_flags.setVarArgs(fVarArgs);
}
// ----- accessor: strict ----------------------------------------------
/**
* Determine if the strict attribute is set.
*
* @return true if strict
*/
public boolean isStrict()
{
return m_flags.isStrict();
}
/**
* Set the strict attribute.
*
* @param fStrict true to set to strict, false otherwise
*/
public void setStrict(boolean fStrict)
{
m_flags.setStrict(fStrict);
}
// ----- accessor: attribute -------------------------------------------
/**
* Access a Java .class attribute structure.
*
* @param sName the attribute name
*
* @return the specified attribute or null if the attribute does not exist
*/
public Attribute getAttribute(String sName)
{
return (Attribute) m_tblAttribute.get(sName);
}
/**
 * Add a Java .class attribute structure, creating the strongly typed
 * Attribute subclass for each well-known attribute name and a generic
 * Attribute for anything else.
 *
 * @param sName the attribute name
 *
 * @return the new attribute
 */
public Attribute addAttribute(String sName)
    {
    Attribute attribute;
    switch (sName)
        {
        case ATTR_CODE:
            attribute = new CodeAttribute(this);
            break;

        case ATTR_EXCEPTIONS:
            attribute = new ExceptionsAttribute(this);
            break;

        case ATTR_DEPRECATED:
            attribute = new DeprecatedAttribute(this);
            break;

        case ATTR_SYNTHETIC:
            attribute = new SyntheticAttribute(this);
            break;

        case ATTR_SIGNATURE:
            attribute = new SignatureAttribute(this);
            break;

        case ATTR_RTVISANNOT:
            attribute = new RuntimeVisibleAnnotationsAttribute(this);
            break;

        case ATTR_RTINVISANNOT:
            attribute = new RuntimeInvisibleAnnotationsAttribute(this);
            break;

        case ATTR_RTVISPARAMANNOT:
            attribute = new RuntimeVisibleParameterAnnotationsAttribute(this);
            break;

        case ATTR_RTINVISPARAMANNOT:
            attribute = new RuntimeInvisibleParameterAnnotationsAttribute(this);
            break;

        case ATTR_RTVISTANNOT:
            attribute = new RuntimeVisibleTypeAnnotationsAttribute(this);
            break;

        case ATTR_RTINVISTANNOT:
            attribute = new RuntimeInvisibleTypeAnnotationsAttribute(this);
            break;

        case ATTR_METHODPARAMS:
            attribute = new MethodParametersAttribute(this);
            break;

        default:
            attribute = new Attribute(this, sName);
            break;
        }

    m_tblAttribute.put(attribute.getIdentity(), attribute);
    m_fModified = true;

    return attribute;
    }
/**
* Remove a attribute.
*
* @param sName the attribute name
*/
public void removeAttribute(String sName)
{
m_tblAttribute.remove(sName);
m_fModified = true;
}
/**
* Access the set of attributes.
*
* @return an enumeration of attributes (not attribute names)
*/
public Enumeration getAttributes()
{
return m_tblAttribute.elements();
}
// ----- accessor: attribute helpers -----------------------------------
/**
* Get the code attribute.
*
* @return the code attribute, creating one if necessary
*/
public CodeAttribute getCode()
{
CodeAttribute attr = (CodeAttribute) m_tblAttribute.get(ATTR_CODE);
return (attr == null ? (CodeAttribute) addAttribute(ATTR_CODE) : attr);
}
/**
* Determine if the method is deprecated.
*
* @return true if deprecated, false otherwise
*/
public boolean isDeprecated()
{
return m_tblAttribute.contains(ATTR_DEPRECATED);
}
/**
* Toggle if the method is deprecated.
*
* @param fDeprecated pass true to deprecate, false otherwise
*/
public void setDeprecated(boolean fDeprecated)
{
if (fDeprecated)
{
addAttribute(ATTR_DEPRECATED);
}
else
{
removeAttribute(ATTR_DEPRECATED);
}
}
/**
* Determine if the method is synthetic.
*
* @return true if synthetic, false otherwise
*/
public boolean isSynthetic()
{
return m_tblAttribute.contains(ATTR_SYNTHETIC) ||
m_flags.isSynthetic();
}
/**
* Toggle if the method is synthetic.
*
* @param fSynthetic pass true to set synthetic, false otherwise
*/
public void setSynthetic(boolean fSynthetic)
{
if (fSynthetic)
{
addAttribute(ATTR_SYNTHETIC);
}
else
{
removeAttribute(ATTR_SYNTHETIC);
}
}
/**
* Add an exception.
*
* @param sClz the class name of the exception
*/
public void addException(String sClz)
{
ExceptionsAttribute attr = (ExceptionsAttribute) getAttribute(ATTR_EXCEPTIONS);
if (attr == null)
{
attr = (ExceptionsAttribute) addAttribute(ATTR_EXCEPTIONS);
}
attr.addException(sClz);
}
/**
* Remove an exception.
*
* @param sClz the class name of the exception
*/
public void removeException(String sClz)
{
ExceptionsAttribute attr = (ExceptionsAttribute) m_tblAttribute.get(ATTR_EXCEPTIONS);
if (attr != null)
{
attr.removeException(sClz);
}
}
/**
* Access the set of exceptions.
*
* @return an enumeration of exception class names
*/
public Enumeration getExceptions()
{
ExceptionsAttribute attr = (ExceptionsAttribute) getAttribute(ATTR_EXCEPTIONS);
return attr == null ? NullImplementation.getEnumeration() : attr.getExceptions();
}
// ----- helpers --------------------------------------------------------
/**
* The class name if this method is from disassembly.
*
* @return the class name as it was found in the constant pool
*/
protected String getClassName()
{
return m_sClass;
}
/**
* Ensure a {@link MethodParametersAttribute} exists as method_info
* attribute.
*
* @param cParams the number of parameters defined by this method's signature
*
* @return a MethodParametersAttribute linked to this Method
*/
protected MethodParametersAttribute ensureMethodParameters(int cParams)
{
MethodParametersAttribute attrParams = (MethodParametersAttribute)
getAttribute(ATTR_METHODPARAMS);
if (attrParams == null)
{
attrParams = (MethodParametersAttribute) addAttribute(ATTR_METHODPARAMS);
attrParams.setParameterCount(cParams);
}
return attrParams;
}
// ----- constants ------------------------------------------------------
/**
* The name of this class.
*/
private static final String CLASS = "Method";
/**
* Access flags applicable to a method.
*/
public static final int ACC_METHOD = AccessFlags.ACC_PUBLIC |
AccessFlags.ACC_PRIVATE |
AccessFlags.ACC_PROTECTED |
AccessFlags.ACC_STATIC |
AccessFlags.ACC_FINAL |
AccessFlags.ACC_SYNCHRONIZED |
AccessFlags.ACC_BRIDGE |
AccessFlags.ACC_VARARGS |
AccessFlags.ACC_NATIVE |
AccessFlags.ACC_ABSTRACT |
AccessFlags.ACC_STRICT |
AccessFlags.ACC_SYNTHETIC;
// ----- data members ---------------------------------------------------
/**
* Whether this method is defined against an interface or a class.
*/
private final boolean f_fInterface;
/**
* The name of the class if this method is the result of disassembly.
*/
private String m_sClass;
/**
* The name of the method.
*/
private UtfConstant m_utfName;
/**
* The signature of the method.
*/
private UtfConstant m_utfSig;
/**
* The AccessFlags structure contained in the method.
*/
private AccessFlags m_flags = new AccessFlags()
{
@Override
protected void preassemble(ConstantPool pool)
{
if (f_fInterface)
{
if (isMaskSet(ACC_PROTECTED | ACC_FINAL | ACC_SYNCHRONIZED | ACC_NATIVE))
{
throw new IllegalStateException("Interface method " + this + " can not be "
+ "protected, final, synchronized, or native");
}
if (pool.getClassFile().getMajorVersion() < 52)
{
setPublic();
setAbstract(true);
}
else if (!isPublic() && !isPrivate())
{
throw new IllegalStateException("Interface method " + this + " must be "
+ "either public or private");
}
}
if (isAbstract() && isMaskSet(ACC_PRIVATE | ACC_STATIC | ACC_FINAL |
ACC_SYNCHRONIZED | ACC_NATIVE | ACC_STRICT))
{
throw new IllegalStateException("Abstract Method " + this + " can not be "
+ "private, static, final, synchronized, native, or strict");
}
super.preassemble(pool);
}
};
/**
* The Attribute structures contained in the method.
*/
private StringTable m_tblAttribute = new StringTable();
/**
* Tracks changes to the method.
*/
private boolean m_fModified;
}
|
apache/ignite-3 | 35,998 | modules/client/src/test/java/org/apache/ignite/client/PartitionAwarenessTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.client;
import static org.apache.ignite.internal.lang.IgniteStringFormatter.format;
import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.willBe;
import static org.apache.ignite.internal.util.CompletableFutures.nullCompletedFuture;
import static org.apache.ignite.internal.util.IgniteUtils.closeAll;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.SubmissionPublisher;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.ignite.Ignite;
import org.apache.ignite.client.AbstractClientTableTest.PersonPojo;
import org.apache.ignite.client.fakes.FakeIgnite;
import org.apache.ignite.client.fakes.FakeIgniteQueryProcessor;
import org.apache.ignite.client.fakes.FakeIgniteTables;
import org.apache.ignite.client.fakes.FakeInternalTable;
import org.apache.ignite.client.handler.FakePlacementDriver;
import org.apache.ignite.compute.IgniteCompute;
import org.apache.ignite.compute.JobDescriptor;
import org.apache.ignite.compute.JobTarget;
import org.apache.ignite.internal.catalog.Catalog;
import org.apache.ignite.internal.catalog.descriptors.CatalogTableDescriptor;
import org.apache.ignite.internal.client.ReliableChannel;
import org.apache.ignite.internal.client.TcpIgniteClient;
import org.apache.ignite.internal.client.sql.ClientSql;
import org.apache.ignite.internal.client.sql.PartitionMappingProvider;
import org.apache.ignite.internal.client.tx.ClientLazyTransaction;
import org.apache.ignite.internal.streamer.SimplePublisher;
import org.apache.ignite.internal.table.TableViewInternal;
import org.apache.ignite.internal.testframework.IgniteTestUtils;
import org.apache.ignite.sql.ResultSet;
import org.apache.ignite.table.DataStreamerItem;
import org.apache.ignite.table.DataStreamerOptions;
import org.apache.ignite.table.DataStreamerReceiver;
import org.apache.ignite.table.DataStreamerReceiverContext;
import org.apache.ignite.table.KeyValueView;
import org.apache.ignite.table.ReceiverDescriptor;
import org.apache.ignite.table.RecordView;
import org.apache.ignite.table.Table;
import org.apache.ignite.table.Tuple;
import org.apache.ignite.table.mapper.Mapper;
import org.apache.ignite.tx.Transaction;
import org.jetbrains.annotations.Nullable;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
/**
* Tests partition awareness.
*/
public class PartitionAwarenessTest extends AbstractClientTest {
    // Primary node expected for key N under the default replica layout. The partition a key
    // falls into is decided by its colocation hash, so these values are empirical.
    private static final String nodeKey0 = "server-2";
    private static final String nodeKey1 = "server-2";
    private static final String nodeKey2 = "server-1";
    private static final String nodeKey3 = "server-2";
    // Second fake server ("server-2") complementing the base-class server, and a client
    // connected to both nodes so partition-aware routing has more than one target.
    private static TestServer testServer2;
    private static FakeIgnite server2;
    private static IgniteClient client2;
    // Last table operation observed by a FakeInternalTable listener and the node it ran on.
    private volatile @Nullable String lastOp;
    private volatile @Nullable String lastOpServerName;
    // Fresh table id per created table so assignments don't leak between tests.
    private static final AtomicInteger nextTableId = new AtomicInteger(101);
    /**
     * Starts the second fake server node and a client connected to both nodes.
     */
    @BeforeAll
    public static void startServer2() {
        server2 = new FakeIgnite("server-2");
        testServer2 = new TestServer(0, server2, null, null, "server-2", clusterId, null, null);
        var clientBuilder = IgniteClient.builder()
                .addresses("127.0.0.1:" + serverPort, "127.0.0.1:" + testServer2.port())
                .heartbeatInterval(200);
        client2 = clientBuilder.build();
    }
    /**
     * Stops the second server and the client connected to it.
     */
    @AfterAll
    public static void stopServer2() throws Exception {
        closeAll(client2, testServer2);
    }
    /**
     * Resets tables and primary replica assignments before each test and waits
     * until the client holds a connection to both server nodes.
     */
    @BeforeEach
    public void initReplicas() throws InterruptedException {
        dropTables(server2);
        initPrimaryReplicas(null);
        assertTrue(IgniteTestUtils.waitForCondition(() -> client2.connections().size() == 2, 3000));
    }
@Test
public void testGetTupleRoutesRequestToPrimaryNode() {
RecordView<Tuple> recordView = defaultTable().recordView();
assertOpOnNode(nodeKey0, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 0L)));
assertOpOnNode(nodeKey1, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 1L)));
assertOpOnNode(nodeKey2, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 2L)));
assertOpOnNode(nodeKey3, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 3L)));
}
@Test
public void testGetRecordRoutesRequestToPrimaryNode() {
RecordView<PersonPojo> pojoView = defaultTable().recordView(Mapper.of(PersonPojo.class));
assertOpOnNode(nodeKey0, "get", tx -> pojoView.get(tx, new PersonPojo(0L)));
assertOpOnNode(nodeKey1, "get", tx -> pojoView.get(tx, new PersonPojo(1L)));
assertOpOnNode(nodeKey2, "get", tx -> pojoView.get(tx, new PersonPojo(2L)));
assertOpOnNode(nodeKey3, "get", tx -> pojoView.get(tx, new PersonPojo(3L)));
}
@Test
public void testGetKeyValueRoutesRequestToPrimaryNode() {
KeyValueView<Long, String> kvView = defaultTable().keyValueView(Mapper.of(Long.class), Mapper.of(String.class));
assertOpOnNode(nodeKey0, "get", tx -> kvView.get(tx, 0L));
assertOpOnNode(nodeKey1, "get", tx -> kvView.get(tx, 1L));
assertOpOnNode(nodeKey2, "get", tx -> kvView.get(tx, 2L));
assertOpOnNode(nodeKey3, "get", tx -> kvView.get(tx, 3L));
}
@Test
public void testGetKeyValueBinaryRoutesRequestToPrimaryNode() {
KeyValueView<Tuple, Tuple> kvView = defaultTable().keyValueView();
assertOpOnNode(nodeKey0, "get", tx -> kvView.get(tx, Tuple.create().set("ID", 0L)));
assertOpOnNode(nodeKey1, "get", tx -> kvView.get(tx, Tuple.create().set("ID", 1L)));
assertOpOnNode(nodeKey2, "get", tx -> kvView.get(tx, Tuple.create().set("ID", 2L)));
assertOpOnNode(nodeKey3, "get", tx -> kvView.get(tx, Tuple.create().set("ID", 3L)));
}
    /**
     * Once a lazy transaction is bound to a node (forced here via an SQL call), every
     * operation in that transaction goes to the transaction's node regardless of the
     * key-to-partition mapping.
     */
    @Test
    public void testNonNullTxDisablesPartitionAwareness() {
        RecordView<Tuple> recordView = defaultTable().recordView();
        var tx = (ClientLazyTransaction) client2.transactions().begin();
        client2.sql().execute(tx, "SELECT 1").close(); // Force lazy tx init.
        // All ops below deliberately reuse the captured, already-started tx.
        String expectedNode = tx.nodeName();
        assertNotNull(expectedNode);
        assertOpOnNode(expectedNode, "get", tx2 -> recordView.get(tx, Tuple.create().set("ID", 0L)));
        assertOpOnNode(expectedNode, "get", tx2 -> recordView.get(tx, Tuple.create().set("ID", 1L)));
        assertOpOnNode(expectedNode, "get", tx2 -> recordView.get(tx, Tuple.create().set("ID", 2L)));
    }
    /**
     * After the primary replicas change, the client must pick up the new assignment —
     * either via a heartbeat message or piggybacked on regular request responses —
     * and start routing keys to the new primaries.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testClientReceivesPartitionAssignmentUpdates(boolean useHeartbeat) throws InterruptedException {
        ReliableChannel ch = ((TcpIgniteClient) client2).channel();
        // Check default assignment.
        RecordView<Tuple> recordView = defaultTable().recordView();
        assertOpOnNode(nodeKey1, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 1L)));
        assertOpOnNode(nodeKey2, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 2L)));
        // Update partition assignment.
        var oldTs = ch.partitionAssignmentTimestamp();
        initPrimaryReplicas(reversedReplicas());
        if (useHeartbeat) {
            // Wait for heartbeat message to receive change notification flag.
            assertTrue(IgniteTestUtils.waitForCondition(() -> ch.partitionAssignmentTimestamp() > oldTs, 3000));
        } else {
            // Perform requests to receive change notification flag.
            int maxRequests = 50;
            while (ch.partitionAssignmentTimestamp() <= oldTs && maxRequests-- > 0) {
                client2.tables().tables();
            }
            assertThat("Failed to receive assignment update", maxRequests, greaterThan(0));
        }
        // Check new assignment: keys 1 and 2 now map to each other's former primary.
        assertThat(ch.partitionAssignmentTimestamp(), greaterThan(oldTs));
        assertOpOnNode(nodeKey2, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 1L)));
        assertOpOnNode(nodeKey1, "get", tx -> recordView.get(tx, Tuple.create().set("ID", 2L)));
    }
    /**
     * Routing must use only the declared colocation columns: with the same ID, different
     * COLO2 values land on different nodes.
     */
    @Test
    public void testCustomColocationKey() {
        RecordView<Tuple> recordView = table(FakeIgniteTables.TABLE_COLOCATION_KEY).recordView();
        assertOpOnNode("server-2", "get", tx -> recordView.get(tx, Tuple.create().set("ID", 0).set("COLO1", "0").set("COLO2", 4L)));
        assertOpOnNode("server-1", "get", tx -> recordView.get(tx, Tuple.create().set("ID", 0).set("COLO1", "0").set("COLO2", 8L)));
    }
    /**
     * Composite-key routing: both key columns participate in the colocation hash.
     */
    @Test
    public void testCompositeKey() {
        RecordView<Tuple> recordView = table(FakeIgniteTables.TABLE_COMPOSITE_KEY).recordView();
        assertOpOnNode("server-2", "get", tx -> recordView.get(tx, Tuple.create().set("ID1", 0).set("ID2", "0")));
        assertOpOnNode("server-1", "get", tx -> recordView.get(tx, Tuple.create().set("ID1", 1).set("ID2", "0")));
        assertOpOnNode("server-2", "get", tx -> recordView.get(tx, Tuple.create().set("ID1", 0).set("ID2", "1")));
        assertOpOnNode("server-1", "get", tx -> recordView.get(tx, Tuple.create().set("ID1", 1).set("ID2", "1")));
        assertOpOnNode("server-2", "get", tx -> recordView.get(tx, Tuple.create().set("ID1", 1).set("ID2", "2")));
    }
    /**
     * Every POJO record view operation must be routed by key: key 0 to {@code nodeKey0},
     * key 1 to {@code nodeKey1}. The expected operation name is what the
     * FakeInternalTable data-access listener records for each call.
     */
    @Test
    public void testAllRecordViewOperations() {
        RecordView<PersonPojo> pojoView = defaultTable().recordView(
                Mapper.of(PersonPojo.class));
        var t1 = new PersonPojo(0L);
        var t2 = new PersonPojo(1L);
        assertOpOnNode(nodeKey0, "insert", tx -> pojoView.insert(tx, t1));
        assertOpOnNode(nodeKey1, "insert", tx -> pojoView.insert(tx, t2));
        assertOpOnNode(nodeKey0, "insertAll", tx -> pojoView.insertAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey1, "insertAll", tx -> pojoView.insertAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey0, "upsert", tx -> pojoView.upsert(tx, t1));
        assertOpOnNode(nodeKey1, "upsert", tx -> pojoView.upsert(tx, t2));
        assertOpOnNode(nodeKey0, "upsertAll", tx -> pojoView.upsertAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey1, "upsertAll", tx -> pojoView.upsertAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey0, "get", tx -> pojoView.get(tx, t1));
        assertOpOnNode(nodeKey1, "get", tx -> pojoView.get(tx, t2));
        assertOpOnNode(nodeKey0, "getAll", tx -> pojoView.getAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey1, "getAll", tx -> pojoView.getAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey0, "getAndUpsert", tx -> pojoView.getAndUpsert(tx, t1));
        assertOpOnNode(nodeKey1, "getAndUpsert", tx -> pojoView.getAndUpsert(tx, t2));
        assertOpOnNode(nodeKey0, "getAndReplace", tx -> pojoView.getAndReplace(tx, t1));
        assertOpOnNode(nodeKey1, "getAndReplace", tx -> pojoView.getAndReplace(tx, t2));
        assertOpOnNode(nodeKey0, "getAndDelete", tx -> pojoView.getAndDelete(tx, t1));
        assertOpOnNode(nodeKey1, "getAndDelete", tx -> pojoView.getAndDelete(tx, t2));
        assertOpOnNode(nodeKey0, "replace", tx -> pojoView.replace(tx, t1));
        assertOpOnNode(nodeKey1, "replace", tx -> pojoView.replace(tx, t2));
        assertOpOnNode(nodeKey0, "replace", tx -> pojoView.replace(tx, t1, t1));
        assertOpOnNode(nodeKey1, "replace", tx -> pojoView.replace(tx, t2, t2));
        assertOpOnNode(nodeKey0, "delete", tx -> pojoView.delete(tx, t1));
        assertOpOnNode(nodeKey1, "delete", tx -> pojoView.delete(tx, t2));
        assertOpOnNode(nodeKey0, "deleteExact", tx -> pojoView.deleteExact(tx, t1));
        assertOpOnNode(nodeKey1, "deleteExact", tx -> pojoView.deleteExact(tx, t2));
        assertOpOnNode(nodeKey0, "deleteAll", tx -> pojoView.deleteAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey1, "deleteAll", tx -> pojoView.deleteAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey0, "deleteAllExact", tx -> pojoView.deleteAllExact(tx, List.of(t1)));
        assertOpOnNode(nodeKey1, "deleteAllExact", tx -> pojoView.deleteAllExact(tx, List.of(t2)));
    }
    /**
     * Every binary record view operation must be routed by key: key 1 to {@code nodeKey1},
     * key 2 to {@code nodeKey2}.
     */
    @Test
    public void testAllRecordBinaryViewOperations() {
        RecordView<Tuple> recordView = defaultTable().recordView();
        Tuple t1 = Tuple.create().set("ID", 1L);
        Tuple t2 = Tuple.create().set("ID", 2L);
        assertOpOnNode(nodeKey1, "insert", tx -> recordView.insert(tx, t1));
        assertOpOnNode(nodeKey2, "insert", tx -> recordView.insert(tx, t2));
        assertOpOnNode(nodeKey1, "insertAll", tx -> recordView.insertAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey2, "insertAll", tx -> recordView.insertAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey1, "upsert", tx -> recordView.upsert(tx, t1));
        assertOpOnNode(nodeKey2, "upsert", tx -> recordView.upsert(tx, t2));
        assertOpOnNode(nodeKey1, "upsertAll", tx -> recordView.upsertAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey2, "upsertAll", tx -> recordView.upsertAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey1, "get", tx -> recordView.get(tx, t1));
        assertOpOnNode(nodeKey2, "get", tx -> recordView.get(tx, t2));
        assertOpOnNode(nodeKey1, "getAll", tx -> recordView.getAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey2, "getAll", tx -> recordView.getAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey1, "getAndUpsert", tx -> recordView.getAndUpsert(tx, t1));
        assertOpOnNode(nodeKey2, "getAndUpsert", tx -> recordView.getAndUpsert(tx, t2));
        assertOpOnNode(nodeKey1, "getAndReplace", tx -> recordView.getAndReplace(tx, t1));
        assertOpOnNode(nodeKey2, "getAndReplace", tx -> recordView.getAndReplace(tx, t2));
        assertOpOnNode(nodeKey1, "getAndDelete", tx -> recordView.getAndDelete(tx, t1));
        assertOpOnNode(nodeKey2, "getAndDelete", tx -> recordView.getAndDelete(tx, t2));
        assertOpOnNode(nodeKey1, "replace", tx -> recordView.replace(tx, t1));
        assertOpOnNode(nodeKey2, "replace", tx -> recordView.replace(tx, t2));
        assertOpOnNode(nodeKey1, "replace", tx -> recordView.replace(tx, t1, t1));
        assertOpOnNode(nodeKey2, "replace", tx -> recordView.replace(tx, t2, t2));
        assertOpOnNode(nodeKey1, "delete", tx -> recordView.delete(tx, t1));
        assertOpOnNode(nodeKey2, "delete", tx -> recordView.delete(tx, t2));
        assertOpOnNode(nodeKey1, "deleteExact", tx -> recordView.deleteExact(tx, t1));
        assertOpOnNode(nodeKey2, "deleteExact", tx -> recordView.deleteExact(tx, t2));
        assertOpOnNode(nodeKey1, "deleteAll", tx -> recordView.deleteAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey2, "deleteAll", tx -> recordView.deleteAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey1, "deleteAllExact", tx -> recordView.deleteAllExact(tx, List.of(t1)));
        assertOpOnNode(nodeKey2, "deleteAllExact", tx -> recordView.deleteAllExact(tx, List.of(t2)));
    }
    /**
     * Every typed key-value view operation must be routed by key. Note that KV methods map
     * to internal table ops ("putIfAbsent" → "insert", "put" → "upsert", etc.), which is
     * what the data-access listener records.
     */
    @Test
    public void testAllKeyValueViewOperations() {
        KeyValueView<Long, String> kvView = defaultTable().keyValueView(Mapper.of(Long.class), Mapper.of(String.class));
        var k1 = 1L;
        var k2 = 2L;
        var v = "v";
        assertOpOnNode(nodeKey1, "insert", tx -> kvView.putIfAbsent(tx, k1, v));
        assertOpOnNode(nodeKey2, "insert", tx -> kvView.putIfAbsent(tx, k2, v));
        assertOpOnNode(nodeKey1, "upsert", tx -> kvView.put(tx, k1, v));
        assertOpOnNode(nodeKey2, "upsert", tx -> kvView.put(tx, k2, v));
        assertOpOnNode(nodeKey1, "upsertAll", tx -> kvView.putAll(tx, Map.of(k1, v)));
        assertOpOnNode(nodeKey2, "upsertAll", tx -> kvView.putAll(tx, Map.of(k2, v)));
        assertOpOnNode(nodeKey1, "get", tx -> kvView.get(tx, k1));
        assertOpOnNode(nodeKey2, "get", tx -> kvView.get(tx, k2));
        assertOpOnNode(nodeKey1, "get", tx -> kvView.contains(tx, k1));
        assertOpOnNode(nodeKey2, "get", tx -> kvView.contains(tx, k2));
        assertOpOnNode(nodeKey1, "getAll", tx -> kvView.getAll(tx, List.of(k1)));
        assertOpOnNode(nodeKey2, "getAll", tx -> kvView.getAll(tx, List.of(k2)));
        assertOpOnNode(nodeKey1, "getAndUpsert", tx -> kvView.getAndPut(tx, k1, v));
        assertOpOnNode(nodeKey2, "getAndUpsert", tx -> kvView.getAndPut(tx, k2, v));
        assertOpOnNode(nodeKey1, "getAndReplace", tx -> kvView.getAndReplace(tx, k1, v));
        assertOpOnNode(nodeKey2, "getAndReplace", tx -> kvView.getAndReplace(tx, k2, v));
        assertOpOnNode(nodeKey1, "getAndDelete", tx -> kvView.getAndRemove(tx, k1));
        assertOpOnNode(nodeKey2, "getAndDelete", tx -> kvView.getAndRemove(tx, k2));
        assertOpOnNode(nodeKey1, "replace", tx -> kvView.replace(tx, k1, v));
        assertOpOnNode(nodeKey2, "replace", tx -> kvView.replace(tx, k2, v));
        assertOpOnNode(nodeKey1, "replace", tx -> kvView.replace(tx, k1, v, v));
        assertOpOnNode(nodeKey2, "replace", tx -> kvView.replace(tx, k2, v, v));
        assertOpOnNode(nodeKey1, "delete", tx -> kvView.remove(tx, k1));
        assertOpOnNode(nodeKey2, "delete", tx -> kvView.remove(tx, k2));
        assertOpOnNode(nodeKey1, "deleteExact", tx -> kvView.remove(tx, k1, v));
        assertOpOnNode(nodeKey2, "deleteExact", tx -> kvView.remove(tx, k2, v));
        assertOpOnNode(nodeKey1, "deleteAll", tx -> kvView.removeAll(tx, List.of(k1)));
        assertOpOnNode(nodeKey2, "deleteAll", tx -> kvView.removeAll(tx, List.of(k2)));
    }
    /**
     * Every binary key-value view operation must be routed by key, mirroring
     * {@link #testAllKeyValueViewOperations()} for the Tuple-based view.
     */
    @Test
    public void testAllKeyValueBinaryViewOperations() {
        KeyValueView<Tuple, Tuple> kvView = defaultTable().keyValueView();
        Tuple t1 = Tuple.create().set("ID", 1L);
        Tuple t2 = Tuple.create().set("ID", 2L);
        Tuple val = Tuple.create();
        assertOpOnNode(nodeKey1, "insert", tx -> kvView.putIfAbsent(tx, t1, val));
        assertOpOnNode(nodeKey2, "insert", tx -> kvView.putIfAbsent(tx, t2, val));
        assertOpOnNode(nodeKey1, "upsert", tx -> kvView.put(tx, t1, val));
        assertOpOnNode(nodeKey2, "upsert", tx -> kvView.put(tx, t2, val));
        assertOpOnNode(nodeKey1, "upsertAll", tx -> kvView.putAll(tx, Map.of(t1, val)));
        assertOpOnNode(nodeKey2, "upsertAll", tx -> kvView.putAll(tx, Map.of(t2, val)));
        assertOpOnNode(nodeKey1, "get", tx -> kvView.get(tx, t1));
        assertOpOnNode(nodeKey2, "get", tx -> kvView.get(tx, t2));
        assertOpOnNode(nodeKey1, "get", tx -> kvView.contains(tx, t1));
        assertOpOnNode(nodeKey2, "get", tx -> kvView.contains(tx, t2));
        assertOpOnNode(nodeKey1, "getAll", tx -> kvView.getAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey2, "getAll", tx -> kvView.getAll(tx, List.of(t2)));
        assertOpOnNode(nodeKey1, "getAndUpsert", tx -> kvView.getAndPut(tx, t1, val));
        assertOpOnNode(nodeKey2, "getAndUpsert", tx -> kvView.getAndPut(tx, t2, val));
        assertOpOnNode(nodeKey1, "getAndReplace", tx -> kvView.getAndReplace(tx, t1, val));
        assertOpOnNode(nodeKey2, "getAndReplace", tx -> kvView.getAndReplace(tx, t2, val));
        assertOpOnNode(nodeKey1, "getAndDelete", tx -> kvView.getAndRemove(tx, t1));
        assertOpOnNode(nodeKey2, "getAndDelete", tx -> kvView.getAndRemove(tx, t2));
        assertOpOnNode(nodeKey1, "replace", tx -> kvView.replace(tx, t1, val));
        assertOpOnNode(nodeKey2, "replace", tx -> kvView.replace(tx, t2, val));
        assertOpOnNode(nodeKey1, "replace", tx -> kvView.replace(tx, t1, val, val));
        assertOpOnNode(nodeKey2, "replace", tx -> kvView.replace(tx, t2, val, val));
        assertOpOnNode(nodeKey1, "delete", tx -> kvView.remove(tx, t1));
        assertOpOnNode(nodeKey2, "delete", tx -> kvView.remove(tx, t2));
        assertOpOnNode(nodeKey1, "deleteExact", tx -> kvView.remove(tx, t1, val));
        assertOpOnNode(nodeKey2, "deleteExact", tx -> kvView.remove(tx, t2, val));
        assertOpOnNode(nodeKey1, "deleteAll", tx -> kvView.removeAll(tx, List.of(t1)));
        assertOpOnNode(nodeKey2, "deleteAll", tx -> kvView.removeAll(tx, List.of(t2)));
    }
    /**
     * Colocated compute with a Tuple key runs the job on the key's primary node
     * (the fake job returns the executing node's name).
     */
    @Test
    public void testExecuteColocatedTupleKeyRoutesRequestToPrimaryNode() {
        Table table = defaultTable();
        Tuple t1 = Tuple.create().set("ID", 1L);
        Tuple t2 = Tuple.create().set("ID", 2L);
        JobDescriptor<Object, String> job = JobDescriptor.<Object, String>builder("job").build();
        assertThat(compute().executeAsync(JobTarget.colocated(table.qualifiedName(), t1), job, null), willBe(nodeKey1));
        assertThat(compute().executeAsync(JobTarget.colocated(table.qualifiedName(), t2), job, null), willBe(nodeKey2));
    }
    /**
     * Colocated compute with a mapped POJO/primitive key runs the job on the key's primary node.
     */
    @Test
    public void testExecuteColocatedObjectKeyRoutesRequestToPrimaryNode() {
        var mapper = Mapper.of(Long.class);
        Table table = defaultTable();
        JobDescriptor<Object, String> job = JobDescriptor.<Object, String>builder("job").build();
        assertThat(compute().executeAsync(JobTarget.colocated(table.qualifiedName(), 1L, mapper), job, null), willBe(nodeKey1));
        assertThat(compute().executeAsync(JobTarget.colocated(table.qualifiedName(), 2L, mapper), job, null), willBe(nodeKey2));
    }
    /**
     * Streamed binary records land on the key's primary node: without a receiver the
     * streamer sends an "updateAll" batch; with a receiver, {@link TestReceiver} performs
     * an "upsert" on the node where it runs.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testDataStreamerRecordBinaryView(boolean withReceiver) {
        RecordView<Tuple> recordView = defaultTable().recordView();
        Consumer<Tuple> stream = t -> {
            CompletableFuture<Void> fut;
            try (SimplePublisher<Tuple> publisher = new SimplePublisher<>()) {
                fut = withReceiver
                        ? recordView.streamData(publisher, DataStreamerItem::get, x -> 0, receiver(), null, null, null)
                        : recordView.streamData(publisher, null);
                publisher.submit(t);
            }
            fut.join();
        };
        String expectedOp = withReceiver ? "upsert" : "updateAll";
        assertOpOnNode(nodeKey0, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 0L)));
        assertOpOnNode(nodeKey1, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 1L)));
        assertOpOnNode(nodeKey2, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 2L)));
        assertOpOnNode(nodeKey3, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 3L)));
    }
    /**
     * Streamed POJO records land on the key's primary node; see
     * {@link #testDataStreamerRecordBinaryView(boolean)} for the receiver semantics.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testDataStreamerRecordView(boolean withReceiver) {
        RecordView<PersonPojo> pojoView = defaultTable().recordView(Mapper.of(PersonPojo.class));
        Consumer<PersonPojo> stream = t -> {
            CompletableFuture<Void> fut;
            try (SimplePublisher<PersonPojo> publisher = new SimplePublisher<>()) {
                fut = withReceiver
                        ? pojoView.streamData(publisher, DataStreamerItem::get, x -> 0, receiver(), null, null, null)
                        : pojoView.streamData(publisher, null);
                publisher.submit(t);
            }
            fut.join();
        };
        String expectedOp = withReceiver ? "upsert" : "updateAll";
        assertOpOnNode(nodeKey0, expectedOp, tx -> stream.accept(new PersonPojo(0L)));
        assertOpOnNode(nodeKey1, expectedOp, tx -> stream.accept(new PersonPojo(1L)));
        assertOpOnNode(nodeKey2, expectedOp, tx -> stream.accept(new PersonPojo(2L)));
        assertOpOnNode(nodeKey3, expectedOp, tx -> stream.accept(new PersonPojo(3L)));
    }
    /**
     * Streamed binary key-value entries land on the key's primary node; see
     * {@link #testDataStreamerRecordBinaryView(boolean)} for the receiver semantics.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testDataStreamerKeyValueBinaryView(boolean withReceiver) {
        KeyValueView<Tuple, Tuple> recordView = defaultTable().keyValueView();
        Consumer<Tuple> stream = t -> {
            CompletableFuture<Void> fut;
            try (SimplePublisher<Entry<Tuple, Tuple>> publisher = new SimplePublisher<>()) {
                fut = withReceiver
                        ? recordView.streamData(publisher, DataStreamerItem::get, x -> 0, receiver(), null, null, null)
                        : recordView.streamData(publisher, null);
                publisher.submit(Map.entry(t, Tuple.create()));
            }
            fut.join();
        };
        String expectedOp = withReceiver ? "upsert" : "updateAll";
        assertOpOnNode(nodeKey0, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 0L)));
        assertOpOnNode(nodeKey1, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 1L)));
        assertOpOnNode(nodeKey2, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 2L)));
        assertOpOnNode(nodeKey3, expectedOp, tx -> stream.accept(Tuple.create().set("ID", 3L)));
    }
@ParameterizedTest
@ValueSource(booleans = {true})
public void testDataStreamerKeyValueView(boolean withReceiver) {
KeyValueView<Long, String> kvView = defaultTable().keyValueView(Mapper.of(Long.class), Mapper.of(String.class));
Consumer<Long> stream = t -> {
CompletableFuture<Void> fut;
try (SimplePublisher<Entry<Long, String>> publisher = new SimplePublisher<>()) {
fut = withReceiver
? kvView.streamData(publisher, DataStreamerItem::get, x -> 0, receiver(), null, null, null)
: kvView.streamData(publisher, null);
publisher.submit(Map.entry(t, t.toString()));
}
fut.join();
};
String expectedOp = withReceiver ? "upsert" : "updateAll";
assertOpOnNode(nodeKey0, expectedOp, tx -> stream.accept(0L));
assertOpOnNode(nodeKey1, expectedOp, tx -> stream.accept(1L));
assertOpOnNode(nodeKey2, expectedOp, tx -> stream.accept(2L));
assertOpOnNode(nodeKey3, expectedOp, tx -> stream.accept(3L));
}
    /**
     * A long-running streamer must observe primary-replica changes mid-stream and start
     * routing subsequent batches to the new primaries. Page size and parallelism of 1
     * make each submitted item a separate, observable batch.
     */
    @Test
    public void testDataStreamerReceivesPartitionAssignmentUpdates() {
        DataStreamerOptions options = DataStreamerOptions.builder()
                .pageSize(1)
                .perPartitionParallelOperations(1)
                .autoFlushInterval(50)
                .build();
        CompletableFuture<Void> fut;
        RecordView<Tuple> recordView = defaultTable().recordView();
        try (SubmissionPublisher<DataStreamerItem<Tuple>> publisher = new SubmissionPublisher<>()) {
            fut = recordView.streamData(publisher, options);
            // Submits one item and blocks until some server has observed the batch.
            Consumer<Long> submit = id -> {
                try {
                    lastOpServerName = null;
                    publisher.submit(DataStreamerItem.of(Tuple.create().set("ID", id)));
                    assertTrue(IgniteTestUtils.waitForCondition(() -> lastOpServerName != null, 1000));
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            };
            assertOpOnNode(nodeKey1, "updateAll", tx -> submit.accept(1L));
            assertOpOnNode(nodeKey2, "updateAll", tx -> submit.accept(2L));
            // Update partition assignment.
            initPrimaryReplicas(reversedReplicas());
            // Send some batches so that the client receives updated assignment.
            for (long i = 0; i < 10; i++) {
                submit.accept(i);
            }
            // Check updated assignment.
            assertOpOnNode(nodeKey2, "updateAll", tx -> submit.accept(1L));
            assertOpOnNode(nodeKey1, "updateAll", tx -> submit.accept(2L));
        }
        fut.join();
    }
    /**
     * With no known primary replicas, operations must still succeed by falling back
     * to the default (non-partition-aware) channel.
     */
    @Test
    public void testAssignmentUnavailablePutGet() {
        initPrimaryReplicas(nullReplicas());
        RecordView<Tuple> recordView = defaultTable().recordView();
        recordView.upsert(null, Tuple.create().set("ID", 123L));
        Tuple res = recordView.get(null, Tuple.create().set("ID", 123L));
        assertNotNull(res);
    }
    /**
     * The streamer must complete without errors when no primary replicas are known,
     * falling back to the default channel for every batch.
     */
    @Test
    public void testAssignmentUnavailableStreamer() {
        initPrimaryReplicas(nullReplicas());
        DataStreamerOptions options = DataStreamerOptions.builder()
                .pageSize(1)
                .perPartitionParallelOperations(1)
                .autoFlushInterval(50)
                .build();
        CompletableFuture<Void> fut;
        RecordView<Tuple> recordView = defaultTable().recordView();
        try (SubmissionPublisher<DataStreamerItem<Tuple>> publisher = new SubmissionPublisher<>()) {
            fut = recordView.streamData(publisher, options);
            for (long i = 0; i < 100; i++) {
                publisher.submit(DataStreamerItem.of(Tuple.create().set("ID", i)));
            }
        }
        assertDoesNotThrow(fut::join);
    }
    /**
     * SQL statements keyed by a dynamic parameter are routed to the key's primary node
     * once the client's partition-awareness metadata cache is warmed up. The expected op
     * is {@code null} because SQL bypasses the table listener; only the query-engine
     * listener (installed in {@link #prepareServer}) records the executing node.
     */
    @Test
    public void testSqlRoutesRequestToPrimaryNode() throws InterruptedException {
        int tableId = 100500;
        String name = "DUMMY";
        // Lease start time must be the same on both servers.
        long leaseStartTime = server.clock().nowLong();
        prepareServer(server, tableId, name, leaseStartTime);
        prepareServer(server2, tableId, name, leaseStartTime);
        executeSql(null, 0);
        // Repeat the query until partition-awareness metadata for it is fully populated.
        assertTrue(IgniteTestUtils.waitForCondition(() -> {
            executeSql(null, 0);
            return ((ClientSql) client2.sql()).partitionAwarenessCachedMetas().stream().allMatch(PartitionMappingProvider::ready);
        }, 2_000));
        assertOpOnNode(nodeKey0, null, tx -> executeSql(tx, 0L));
        assertOpOnNode(nodeKey1, null, tx -> executeSql(tx, 1L));
        assertOpOnNode(nodeKey2, null, tx -> executeSql(tx, 2L));
        assertOpOnNode(nodeKey3, null, tx -> executeSql(tx, 3L));
    }
    /**
     * Sets up one fake server for the SQL routing test: installs primary replicas for
     * {@code tableId}, creates the table, and hooks the query engine so the executing
     * node's name is captured in {@link #lastOpServerName}.
     */
    private void prepareServer(FakeIgnite server, int tableId, String name, long leaseStartTime) {
        initPrimaryReplicas(server.placementDriver(), null, leaseStartTime, tableId);
        createTable(server, tableId, name);
        ((FakeIgniteQueryProcessor) server.queryEngine()).setDataAccessListener((nodeName) -> lastOpServerName = nodeName);
    }
    /**
     * Executes the partition-awareness test query on {@code client2} with {@code id} as the
     * dynamic parameter, discarding the result.
     */
    private void executeSql(@Nullable Transaction tx, long id) {
        // NOTE(review): the format pattern contains no "{}" placeholder, so DEFAULT_TABLE is
        // never substituted — presumably FakeIgniteQueryProcessor matches this literal text;
        // confirm against the fake's implementation before changing.
        try (ResultSet<?> ignored = client2.sql().execute(tx, format("SELECT SINGLE COLUMN PA", DEFAULT_TABLE), id)) {
            // no-op
        }
    }
    /**
     * Asserts that {@code op} results in {@code expectedOp} being executed on
     * {@code expectedNode}, checking both the implicit-transaction path and the
     * explicit-transaction path.
     */
    private void assertOpOnNode(String expectedNode, String expectedOp, Consumer<Transaction> op) {
        assertOpOnNodeNoTx(expectedNode, expectedOp, op);
        assertOpOnNodeWithTx(expectedNode, expectedOp, op);
    }
    /**
     * Runs {@code op} with an implicit (null) transaction and verifies the operation name
     * and executing node recorded by the fake-table listeners.
     */
    private void assertOpOnNodeNoTx(String expectedNode, String expectedOp, Consumer<Transaction> op) {
        lastOpServerName = null;
        lastOp = null;
        op.accept(null);
        assertEquals(expectedOp, lastOp);
        assertEquals(expectedNode, lastOpServerName, "Operation " + expectedOp + " was not executed on expected node");
    }
private void assertOpOnNodeWithTx(String expectedNode, String expectedOp, Consumer<Transaction> op) {
lastOpServerName = null;
lastOp = null;
Transaction tx = client.transactions().begin();
op.accept(null);
tx.commit();
assertEquals(expectedOp, lastOp);
assertEquals(expectedNode, lastOpServerName, "Operation " + expectedOp + " was not executed on expected node with transaction");
}
    /**
     * Creates (on both servers) and returns the default test table.
     */
    private Table defaultTable() {
        return table(DEFAULT_TABLE);
    }
    /**
     * Creates a table with the same fresh id on both servers and returns the client-side handle.
     */
    private Table table(String name) {
        // Create table on both servers with the same ID.
        int tableId = nextTableId.getAndIncrement();
        createTable(server, tableId, name);
        createTable(server2, tableId, name);
        return client2.tables().table(name);
    }
    /**
     * Compute facade of the partition-aware client.
     */
    private static IgniteCompute compute() {
        return client2.compute();
    }
    /**
     * Creates a fake table on the given node and records every data access into
     * {@link #lastOp} / {@link #lastOpServerName}.
     */
    private void createTable(Ignite ignite, int id, String name) {
        FakeIgniteTables tables = (FakeIgniteTables) ignite.tables();
        TableViewInternal tableView = tables.createTable(name, id);
        ((FakeInternalTable) tableView.internalTable()).setDataAccessListener((op, data) -> {
            lastOp = op;
            lastOpServerName = ignite.name();
        });
    }
    /**
     * Installs the given primary replica layout (or the default one when {@code null})
     * on both servers with an identical lease start time.
     */
    private static void initPrimaryReplicas(@Nullable List<String> replicas) {
        // Lease start time must be the same on both servers.
        long leaseStartTime = testServer.clock().nowLong();
        initPrimaryReplicas(testServer.placementDriver(), replicas, leaseStartTime);
        initPrimaryReplicas(testServer2.placementDriver(), replicas, leaseStartTime);
    }
    /**
     * Installs replicas for the most recently created table id on one placement driver.
     */
    private static void initPrimaryReplicas(FakePlacementDriver placementDriver, @Nullable List<String> replicas, long leaseStartTime) {
        initPrimaryReplicas(placementDriver, replicas, leaseStartTime, nextTableId.get() - 1);
    }
    /**
     * Installs the primary replica list for {@code tableId} on one placement driver,
     * defaulting to {@link #defaultReplicas()} when {@code replicas} is {@code null}.
     */
    private static void initPrimaryReplicas(
            FakePlacementDriver placementDriver,
            @Nullable List<String> replicas,
            long leaseStartTime,
            int tableId
    ) {
        if (replicas == null) {
            replicas = defaultReplicas();
        }
        placementDriver.returnError(false);
        placementDriver.setReplicas(replicas, tableId, zoneId(tableId), leaseStartTime);
    }
    /**
     * Resolves the distribution zone id of {@code tableId} from the active catalog.
     */
    private static int zoneId(int tableId) {
        Catalog catalog = testServer.catalogService().activeCatalog(Long.MAX_VALUE);
        assertThat(catalog, is(notNullValue()));
        CatalogTableDescriptor table = catalog.table(tableId);
        assertThat(table, is(notNullValue()));
        return table.zoneId();
    }
    /**
     * Default layout: partitions 0..3 alternate between server 1 and server 2.
     */
    private static List<String> defaultReplicas() {
        return List.of(testServer.nodeName(), testServer2.nodeName(), testServer.nodeName(), testServer2.nodeName());
    }
    /**
     * The default layout with the two servers swapped for every partition.
     */
    private static List<String> reversedReplicas() {
        return List.of(testServer2.nodeName(), testServer.nodeName(), testServer2.nodeName(), testServer.nodeName());
    }
private static List<String> nullReplicas() {
return IntStream.range(0, 4).mapToObj(i -> (String) null).collect(Collectors.toList());
}
    /**
     * Descriptor for {@link TestReceiver}; the unchecked cast is acceptable here because
     * the receiver ignores its argument.
     */
    private static <A> ReceiverDescriptor<A> receiver() {
        return (ReceiverDescriptor<A>) ReceiverDescriptor.builder(TestReceiver.class).build();
    }
    /**
     * Streamer receiver that upserts a fixed row into the default table, producing an
     * "upsert" operation on whichever node the receiver runs on.
     */
    private static class TestReceiver implements DataStreamerReceiver<Object, Object, Object> {
        @SuppressWarnings("resource")
        @Override
        public CompletableFuture<List<Object>> receive(List<Object> page, DataStreamerReceiverContext ctx, Object arg) {
            ctx.ignite().tables().table(DEFAULT_TABLE).recordView().upsert(null, Tuple.create().set("ID", 0L));
            return nullCompletedFuture();
        }
    }
}
|
googleapis/google-cloud-java | 36,127 | java-network-management/proto-google-cloud-network-management-v1/src/main/java/com/google/cloud/networkmanagement/v1/UpdateVpcFlowLogsConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkmanagement/v1/vpc_flow_logs.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkmanagement.v1;
/**
 *
 *
 * <pre>
 * Request for the `UpdateVpcFlowLogsConfig` method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest}
 */
public final class UpdateVpcFlowLogsConfigRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest)
    UpdateVpcFlowLogsConfigRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateVpcFlowLogsConfigRequest.newBuilder() to construct.
  private UpdateVpcFlowLogsConfigRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UpdateVpcFlowLogsConfigRequest() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateVpcFlowLogsConfigRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkmanagement.v1.VpcFlowLogsProto
        .internal_static_google_cloud_networkmanagement_v1_UpdateVpcFlowLogsConfigRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkmanagement.v1.VpcFlowLogsProto
        .internal_static_google_cloud_networkmanagement_v1_UpdateVpcFlowLogsConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest.class,
            com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest.Builder.class);
  }
  // Presence bits: 0x1 = update_mask, 0x2 = vpc_flow_logs_config.
  private int bitField0_;
  public static final int UPDATE_MASK_FIELD_NUMBER = 1;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Required. Mask of fields to update. At least one path must be supplied in
   * this field.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Mask of fields to update. At least one path must be supplied in
   * this field.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Required. Mask of fields to update. At least one path must be supplied in
   * this field.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  public static final int VPC_FLOW_LOGS_CONFIG_FIELD_NUMBER = 2;
  private com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpcFlowLogsConfig_;
  /**
   *
   *
   * <pre>
   * Required. Only fields specified in update_mask are updated.
   * </pre>
   *
   * <code>
   * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the vpcFlowLogsConfig field is set.
   */
  @java.lang.Override
  public boolean hasVpcFlowLogsConfig() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Only fields specified in update_mask are updated.
   * </pre>
   *
   * <code>
   * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The vpcFlowLogsConfig.
   */
  @java.lang.Override
  public com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig getVpcFlowLogsConfig() {
    return vpcFlowLogsConfig_ == null
        ? com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.getDefaultInstance()
        : vpcFlowLogsConfig_;
  }
  /**
   *
   *
   * <pre>
   * Required. Only fields specified in update_mask are updated.
   * </pre>
   *
   * <code>
   * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.networkmanagement.v1.VpcFlowLogsConfigOrBuilder
      getVpcFlowLogsConfigOrBuilder() {
    return vpcFlowLogsConfig_ == null
        ? com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.getDefaultInstance()
        : vpcFlowLogsConfig_;
  }
  // Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getVpcFlowLogsConfig());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getVpcFlowLogsConfig());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest other =
        (com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest) obj;
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasVpcFlowLogsConfig() != other.hasVpcFlowLogsConfig()) return false;
    if (hasVpcFlowLogsConfig()) {
      if (!getVpcFlowLogsConfig().equals(other.getVpcFlowLogsConfig())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasVpcFlowLogsConfig()) {
      hash = (37 * hash) + VPC_FLOW_LOGS_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getVpcFlowLogsConfig().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for the `UpdateVpcFlowLogsConfig` method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest)
      com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkmanagement.v1.VpcFlowLogsProto
          .internal_static_google_cloud_networkmanagement_v1_UpdateVpcFlowLogsConfigRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkmanagement.v1.VpcFlowLogsProto
          .internal_static_google_cloud_networkmanagement_v1_UpdateVpcFlowLogsConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest.class,
              com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
        getVpcFlowLogsConfigFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      vpcFlowLogsConfig_ = null;
      if (vpcFlowLogsConfigBuilder_ != null) {
        vpcFlowLogsConfigBuilder_.dispose();
        vpcFlowLogsConfigBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkmanagement.v1.VpcFlowLogsProto
          .internal_static_google_cloud_networkmanagement_v1_UpdateVpcFlowLogsConfigRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
        getDefaultInstanceForType() {
      return com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest build() {
      com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest buildPartial() {
      com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest result =
          new com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(
        com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.vpcFlowLogsConfig_ =
            vpcFlowLogsConfigBuilder_ == null
                ? vpcFlowLogsConfig_
                : vpcFlowLogsConfigBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest) {
        return mergeFrom(
            (com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest other) {
      if (other
          == com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
              .getDefaultInstance()) return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasVpcFlowLogsConfig()) {
        mergeVpcFlowLogsConfig(other.getVpcFlowLogsConfig());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getVpcFlowLogsConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits; copied into the message by buildPartial0().
    private int bitField0_;
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Mask of fields to update. At least one path must be supplied in
     * this field.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    private com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpcFlowLogsConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig,
            com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.Builder,
            com.google.cloud.networkmanagement.v1.VpcFlowLogsConfigOrBuilder>
        vpcFlowLogsConfigBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the vpcFlowLogsConfig field is set.
     */
    public boolean hasVpcFlowLogsConfig() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The vpcFlowLogsConfig.
     */
    public com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig getVpcFlowLogsConfig() {
      if (vpcFlowLogsConfigBuilder_ == null) {
        return vpcFlowLogsConfig_ == null
            ? com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.getDefaultInstance()
            : vpcFlowLogsConfig_;
      } else {
        return vpcFlowLogsConfigBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setVpcFlowLogsConfig(
        com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig value) {
      if (vpcFlowLogsConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        vpcFlowLogsConfig_ = value;
      } else {
        vpcFlowLogsConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setVpcFlowLogsConfig(
        com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.Builder builderForValue) {
      if (vpcFlowLogsConfigBuilder_ == null) {
        vpcFlowLogsConfig_ = builderForValue.build();
      } else {
        vpcFlowLogsConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeVpcFlowLogsConfig(
        com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig value) {
      if (vpcFlowLogsConfigBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && vpcFlowLogsConfig_ != null
            && vpcFlowLogsConfig_
                != com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.getDefaultInstance()) {
          getVpcFlowLogsConfigBuilder().mergeFrom(value);
        } else {
          vpcFlowLogsConfig_ = value;
        }
      } else {
        vpcFlowLogsConfigBuilder_.mergeFrom(value);
      }
      if (vpcFlowLogsConfig_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearVpcFlowLogsConfig() {
      bitField0_ = (bitField0_ & ~0x00000002);
      vpcFlowLogsConfig_ = null;
      if (vpcFlowLogsConfigBuilder_ != null) {
        vpcFlowLogsConfigBuilder_.dispose();
        vpcFlowLogsConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.Builder
        getVpcFlowLogsConfigBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getVpcFlowLogsConfigFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkmanagement.v1.VpcFlowLogsConfigOrBuilder
        getVpcFlowLogsConfigOrBuilder() {
      if (vpcFlowLogsConfigBuilder_ != null) {
        return vpcFlowLogsConfigBuilder_.getMessageOrBuilder();
      } else {
        return vpcFlowLogsConfig_ == null
            ? com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.getDefaultInstance()
            : vpcFlowLogsConfig_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Only fields specified in update_mask are updated.
     * </pre>
     *
     * <code>
     * .google.cloud.networkmanagement.v1.VpcFlowLogsConfig vpc_flow_logs_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig,
            com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.Builder,
            com.google.cloud.networkmanagement.v1.VpcFlowLogsConfigOrBuilder>
        getVpcFlowLogsConfigFieldBuilder() {
      if (vpcFlowLogsConfigBuilder_ == null) {
        vpcFlowLogsConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig,
                com.google.cloud.networkmanagement.v1.VpcFlowLogsConfig.Builder,
                com.google.cloud.networkmanagement.v1.VpcFlowLogsConfigOrBuilder>(
                getVpcFlowLogsConfig(), getParentForChildren(), isClean());
        vpcFlowLogsConfig_ = null;
      }
      return vpcFlowLogsConfigBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest)
  private static final com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest();
  }
  public static com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser used by all parseFrom(...)/parseDelimitedFrom(...) overloads.
  private static final com.google.protobuf.Parser<UpdateVpcFlowLogsConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateVpcFlowLogsConfigRequest>() {
        @java.lang.Override
        public UpdateVpcFlowLogsConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateVpcFlowLogsConfigRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateVpcFlowLogsConfigRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.networkmanagement.v1.UpdateVpcFlowLogsConfigRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,185 | java-service-usage/proto-google-cloud-service-usage-v1beta1/src/main/java/com/google/api/serviceusage/v1beta1/OverrideInlineSource.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/serviceusage/v1beta1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.api.serviceusage.v1beta1;
/**
*
*
* <pre>
* Import data embedded in the request message
* </pre>
*
* Protobuf type {@code google.api.serviceusage.v1beta1.OverrideInlineSource}
*/
public final class OverrideInlineSource extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.serviceusage.v1beta1.OverrideInlineSource)
OverrideInlineSourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use OverrideInlineSource.newBuilder() to construct.
  // Builder-based constructor invoked by Builder.build()/buildPartial().
  private OverrideInlineSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: starts with an empty overrides list.
  private OverrideInlineSource() {
    overrides_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new OverrideInlineSource();
  }
  // Message descriptor, resolved from the generated file descriptor in ResourcesProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.serviceusage.v1beta1.ResourcesProto
        .internal_static_google_api_serviceusage_v1beta1_OverrideInlineSource_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.api.serviceusage.v1beta1.ResourcesProto
        .internal_static_google_api_serviceusage_v1beta1_OverrideInlineSource_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.serviceusage.v1beta1.OverrideInlineSource.class,
            com.google.api.serviceusage.v1beta1.OverrideInlineSource.Builder.class);
  }
public static final int OVERRIDES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> overrides_;
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> getOverridesList() {
return overrides_;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
getOverridesOrBuilderList() {
return overrides_;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
@java.lang.Override
public int getOverridesCount() {
return overrides_.size();
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
@java.lang.Override
public com.google.api.serviceusage.v1beta1.QuotaOverride getOverrides(int index) {
return overrides_.get(index);
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
@java.lang.Override
public com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder getOverridesOrBuilder(
int index) {
return overrides_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < overrides_.size(); i++) {
output.writeMessage(1, overrides_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < overrides_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, overrides_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.serviceusage.v1beta1.OverrideInlineSource)) {
return super.equals(obj);
}
com.google.api.serviceusage.v1beta1.OverrideInlineSource other =
(com.google.api.serviceusage.v1beta1.OverrideInlineSource) obj;
if (!getOverridesList().equals(other.getOverridesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getOverridesCount() > 0) {
hash = (37 * hash) + OVERRIDES_FIELD_NUMBER;
hash = (53 * hash) + getOverridesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.api.serviceusage.v1beta1.OverrideInlineSource prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Import data embedded in the request message
* </pre>
*
* Protobuf type {@code google.api.serviceusage.v1beta1.OverrideInlineSource}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.serviceusage.v1beta1.OverrideInlineSource)
com.google.api.serviceusage.v1beta1.OverrideInlineSourceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.serviceusage.v1beta1.ResourcesProto
.internal_static_google_api_serviceusage_v1beta1_OverrideInlineSource_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.serviceusage.v1beta1.ResourcesProto
.internal_static_google_api_serviceusage_v1beta1_OverrideInlineSource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.serviceusage.v1beta1.OverrideInlineSource.class,
com.google.api.serviceusage.v1beta1.OverrideInlineSource.Builder.class);
}
// Construct using com.google.api.serviceusage.v1beta1.OverrideInlineSource.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (overridesBuilder_ == null) {
overrides_ = java.util.Collections.emptyList();
} else {
overrides_ = null;
overridesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.api.serviceusage.v1beta1.ResourcesProto
.internal_static_google_api_serviceusage_v1beta1_OverrideInlineSource_descriptor;
}
@java.lang.Override
public com.google.api.serviceusage.v1beta1.OverrideInlineSource getDefaultInstanceForType() {
return com.google.api.serviceusage.v1beta1.OverrideInlineSource.getDefaultInstance();
}
@java.lang.Override
public com.google.api.serviceusage.v1beta1.OverrideInlineSource build() {
com.google.api.serviceusage.v1beta1.OverrideInlineSource result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.api.serviceusage.v1beta1.OverrideInlineSource buildPartial() {
com.google.api.serviceusage.v1beta1.OverrideInlineSource result =
new com.google.api.serviceusage.v1beta1.OverrideInlineSource(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.api.serviceusage.v1beta1.OverrideInlineSource result) {
if (overridesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
overrides_ = java.util.Collections.unmodifiableList(overrides_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.overrides_ = overrides_;
} else {
result.overrides_ = overridesBuilder_.build();
}
}
private void buildPartial0(com.google.api.serviceusage.v1beta1.OverrideInlineSource result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.serviceusage.v1beta1.OverrideInlineSource) {
return mergeFrom((com.google.api.serviceusage.v1beta1.OverrideInlineSource) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.api.serviceusage.v1beta1.OverrideInlineSource other) {
if (other == com.google.api.serviceusage.v1beta1.OverrideInlineSource.getDefaultInstance())
return this;
if (overridesBuilder_ == null) {
if (!other.overrides_.isEmpty()) {
if (overrides_.isEmpty()) {
overrides_ = other.overrides_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOverridesIsMutable();
overrides_.addAll(other.overrides_);
}
onChanged();
}
} else {
if (!other.overrides_.isEmpty()) {
if (overridesBuilder_.isEmpty()) {
overridesBuilder_.dispose();
overridesBuilder_ = null;
overrides_ = other.overrides_;
bitField0_ = (bitField0_ & ~0x00000001);
overridesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getOverridesFieldBuilder()
: null;
} else {
overridesBuilder_.addAllMessages(other.overrides_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.api.serviceusage.v1beta1.QuotaOverride m =
input.readMessage(
com.google.api.serviceusage.v1beta1.QuotaOverride.parser(),
extensionRegistry);
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.add(m);
} else {
overridesBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> overrides_ =
java.util.Collections.emptyList();
private void ensureOverridesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
overrides_ =
new java.util.ArrayList<com.google.api.serviceusage.v1beta1.QuotaOverride>(overrides_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.serviceusage.v1beta1.QuotaOverride,
com.google.api.serviceusage.v1beta1.QuotaOverride.Builder,
com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
overridesBuilder_;
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> getOverridesList() {
if (overridesBuilder_ == null) {
return java.util.Collections.unmodifiableList(overrides_);
} else {
return overridesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public int getOverridesCount() {
if (overridesBuilder_ == null) {
return overrides_.size();
} else {
return overridesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride getOverrides(int index) {
if (overridesBuilder_ == null) {
return overrides_.get(index);
} else {
return overridesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder setOverrides(
int index, com.google.api.serviceusage.v1beta1.QuotaOverride value) {
if (overridesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOverridesIsMutable();
overrides_.set(index, value);
onChanged();
} else {
overridesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder setOverrides(
int index, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.set(index, builderForValue.build());
onChanged();
} else {
overridesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addOverrides(com.google.api.serviceusage.v1beta1.QuotaOverride value) {
if (overridesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOverridesIsMutable();
overrides_.add(value);
onChanged();
} else {
overridesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addOverrides(
int index, com.google.api.serviceusage.v1beta1.QuotaOverride value) {
if (overridesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOverridesIsMutable();
overrides_.add(index, value);
onChanged();
} else {
overridesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addOverrides(
com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.add(builderForValue.build());
onChanged();
} else {
overridesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addOverrides(
int index, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.add(index, builderForValue.build());
onChanged();
} else {
overridesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder addAllOverrides(
java.lang.Iterable<? extends com.google.api.serviceusage.v1beta1.QuotaOverride> values) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, overrides_);
onChanged();
} else {
overridesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder clearOverrides() {
if (overridesBuilder_ == null) {
overrides_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
overridesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public Builder removeOverrides(int index) {
if (overridesBuilder_ == null) {
ensureOverridesIsMutable();
overrides_.remove(index);
onChanged();
} else {
overridesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder getOverridesBuilder(
int index) {
return getOverridesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder getOverridesOrBuilder(
int index) {
if (overridesBuilder_ == null) {
return overrides_.get(index);
} else {
return overridesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public java.util.List<? extends com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
getOverridesOrBuilderList() {
if (overridesBuilder_ != null) {
return overridesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(overrides_);
}
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder addOverridesBuilder() {
return getOverridesFieldBuilder()
.addBuilder(com.google.api.serviceusage.v1beta1.QuotaOverride.getDefaultInstance());
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder addOverridesBuilder(
int index) {
return getOverridesFieldBuilder()
.addBuilder(
index, com.google.api.serviceusage.v1beta1.QuotaOverride.getDefaultInstance());
}
/**
*
*
* <pre>
* The overrides to create.
* Each override must have a value for 'metric' and 'unit', to specify
* which metric and which limit the override should be applied to.
* The 'name' field of the override does not need to be set; it is ignored.
* </pre>
*
* <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code>
*/
public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride.Builder>
getOverridesBuilderList() {
return getOverridesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.serviceusage.v1beta1.QuotaOverride,
com.google.api.serviceusage.v1beta1.QuotaOverride.Builder,
com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>
getOverridesFieldBuilder() {
if (overridesBuilder_ == null) {
overridesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.serviceusage.v1beta1.QuotaOverride,
com.google.api.serviceusage.v1beta1.QuotaOverride.Builder,
com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>(
overrides_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
overrides_ = null;
}
return overridesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.api.serviceusage.v1beta1.OverrideInlineSource)
}
// @@protoc_insertion_point(class_scope:google.api.serviceusage.v1beta1.OverrideInlineSource)
private static final com.google.api.serviceusage.v1beta1.OverrideInlineSource DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.api.serviceusage.v1beta1.OverrideInlineSource();
}
public static com.google.api.serviceusage.v1beta1.OverrideInlineSource getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<OverrideInlineSource> PARSER =
new com.google.protobuf.AbstractParser<OverrideInlineSource>() {
@java.lang.Override
public OverrideInlineSource parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<OverrideInlineSource> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<OverrideInlineSource> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.api.serviceusage.v1beta1.OverrideInlineSource getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/incubator-seata | 35,536 | common/src/main/java/org/apache/seata/common/ConfigurationKeys.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seata.common;
/**
 * Configuration keys used by Seata client and server components.
 *
 */
public interface ConfigurationKeys {
    /**
     * The constant SEATA_FILE_ROOT_CONFIG
     */
    String SEATA_FILE_ROOT_CONFIG = "seata";
    /**
     * The constant FILE_ROOT_REGISTRY.
     */
    String FILE_ROOT_REGISTRY = "registry";
    /**
     * The constant FILE_ROOT_CONFIG.
     */
    String FILE_ROOT_CONFIG = "config";
    /**
     * The constant FILE_CONFIG_SPLIT_CHAR.
     */
    String FILE_CONFIG_SPLIT_CHAR = ".";
    /**
     * The constant FILE_ROOT_PREFIX_REGISTRY.
     */
    String FILE_ROOT_PREFIX_REGISTRY = FILE_ROOT_REGISTRY + FILE_CONFIG_SPLIT_CHAR;
    /**
     * The constant FILE_ROOT_PREFIX_CONFIG.
     */
    String FILE_ROOT_PREFIX_CONFIG = FILE_ROOT_CONFIG + FILE_CONFIG_SPLIT_CHAR;
    /**
     * The constant SEATA_FILE_PREFIX_ROOT_CONFIG
     */
    String SEATA_FILE_PREFIX_ROOT_CONFIG = SEATA_FILE_ROOT_CONFIG + FILE_CONFIG_SPLIT_CHAR;
    /**
     * The constant FILE_ROOT_TYPE.
     */
    String FILE_ROOT_TYPE = "type";
    /**
     * The constant DATA_TYPE.
     */
    String DATA_TYPE = "dataType";
    /**
     * The constant SEATA_PREFIX.
     */
    String SEATA_PREFIX = SEATA_FILE_ROOT_CONFIG + ".";
    /**
     * The constant SERVICE_PREFIX.
     */
    String SERVICE_PREFIX = "service.";
    /**
     * The constant STORE_PREFIX.
     */
    String STORE_PREFIX = "store.";
    /**
     * The constant SESSION_PREFIX.
     */
    String SESSION_PREFIX = "session.";
    /**
     * The constant STORE_SESSION_PREFIX.
     */
    String STORE_SESSION_PREFIX = STORE_PREFIX + SESSION_PREFIX;
    /**
     * The constant MODE.
     */
    String MODE = "mode";
    /**
     * The constant STORE_MODE.
     */
    String STORE_MODE = STORE_PREFIX + MODE;
    /**
     * The constant SERVER_STORE_MODE.
     */
    String SERVER_STORE_MODE = SEATA_PREFIX + STORE_PREFIX + MODE;
    /**
     * The constant STORE_LOCK_MODE.
     */
    String STORE_LOCK_MODE = STORE_PREFIX + "lock." + MODE;
    /**
     * The constant SERVER_STORE_LOCK_MODE.
     */
    String SERVER_STORE_LOCK_MODE = SEATA_PREFIX + STORE_PREFIX + "lock." + MODE;
    /**
     * The constant STORE_SESSION_MODE.
     */
    String STORE_SESSION_MODE = STORE_SESSION_PREFIX + MODE;
    /**
     * The constant SERVER_STORE_SESSION_MODE.
     */
    String SERVER_STORE_SESSION_MODE = SEATA_PREFIX + STORE_SESSION_PREFIX + MODE;
    /**
     * The constant STORE_PUBLIC_KEY.
     */
    String STORE_PUBLIC_KEY = STORE_PREFIX + "publicKey";
    /**
     * The constant STORE_FILE_PREFIX
     */
    String STORE_FILE_PREFIX = STORE_PREFIX + "file.";
    /**
     * The constant STORE_FILE_DIR
     */
    String STORE_FILE_DIR = STORE_FILE_PREFIX + "dir";
    /**
     * The constant SERVICE_GROUP_MAPPING_PREFIX.
     */
    String SERVICE_GROUP_MAPPING_PREFIX = SERVICE_PREFIX + "vgroupMapping.";
    /**
     * The constant GROUPLIST_POSTFIX.
     */
    String GROUPLIST_POSTFIX = ".grouplist";
    /**
     * The constant SERVER_NODE_SPLIT_CHAR.
     */
    String SERVER_NODE_SPLIT_CHAR = System.getProperty("line.separator");
    /**
     * The constant CLIENT_PREFIX.
     */
    String CLIENT_PREFIX = "client.";
    /**
     * The constant SERVER_PREFIX.
     */
    String SERVER_PREFIX = "server.";
    /**
     * The constant TRANSPORT_PREFIX.
     */
    String TRANSPORT_PREFIX = "transport.";
    /**
     * The constant CLIENT_RM_PREFIX.
     */
    String CLIENT_RM_PREFIX = CLIENT_PREFIX + "rm.";
    /**
     * The constant CLIENT_ASYNC_COMMIT_BUFFER_LIMIT.
     */
    String CLIENT_ASYNC_COMMIT_BUFFER_LIMIT = CLIENT_RM_PREFIX + "asyncCommitBufferLimit";
    /**
     * The constant CLIENT_RM_LOCK_PREFIX.
     */
    String CLIENT_RM_LOCK_PREFIX = CLIENT_RM_PREFIX + "lock.";
    /**
     * The constant CLIENT_LOCK_RETRY_TIMES.
     */
    String CLIENT_LOCK_RETRY_TIMES = CLIENT_RM_LOCK_PREFIX + "retryTimes";
    /**
     * The constant CLIENT_LOCK_RETRY_INTERVAL.
     */
    String CLIENT_LOCK_RETRY_INTERVAL = CLIENT_RM_LOCK_PREFIX + "retryInterval";
    /**
     * The constant CLIENT_LOCK_RETRY_POLICY_BRANCH_ROLLBACK_ON_CONFLICT.
     */
    String CLIENT_LOCK_RETRY_POLICY_BRANCH_ROLLBACK_ON_CONFLICT =
            CLIENT_RM_LOCK_PREFIX + "retryPolicyBranchRollbackOnConflict";
    /**
     * The constant SERVICE_SESSION_RELOAD_READ_SIZE
     */
    String SERVICE_SESSION_RELOAD_READ_SIZE = STORE_FILE_PREFIX + "sessionReloadReadSize";
    /**
     * The constant CLIENT_REPORT_SUCCESS_ENABLE.
     */
    String CLIENT_REPORT_SUCCESS_ENABLE = CLIENT_RM_PREFIX + "reportSuccessEnable";
    /**
     * The constant CLIENT_SAGA_BRANCH_REGISTER_ENABLE.
     */
    String CLIENT_SAGA_BRANCH_REGISTER_ENABLE = CLIENT_RM_PREFIX + "sagaBranchRegisterEnable";
    /**
     * The constant CLIENT_SAGA_JSON_PARSER.
     */
    String CLIENT_SAGA_JSON_PARSER = CLIENT_RM_PREFIX + "sagaJsonParser";
    /**
     * The constant CLIENT_SAGA_RETRY_PERSIST_MODE_UPDATE.
     */
    String CLIENT_SAGA_RETRY_PERSIST_MODE_UPDATE = CLIENT_RM_PREFIX + "sagaRetryPersistModeUpdate";
    /**
     * The constant CLIENT_SAGA_COMPENSATE_PERSIST_MODE_UPDATE.
     */
    String CLIENT_SAGA_COMPENSATE_PERSIST_MODE_UPDATE = CLIENT_RM_PREFIX + "sagaCompensatePersistModeUpdate";
    /**
     * The constant CLIENT_REPORT_RETRY_COUNT.
     */
    String CLIENT_REPORT_RETRY_COUNT = CLIENT_RM_PREFIX + "reportRetryCount";
    /**
     * The constant CLIENT_TABLE_META_CHECK_ENABLE.
     */
    String CLIENT_TABLE_META_CHECK_ENABLE = CLIENT_RM_PREFIX + "tableMetaCheckEnable";
    /**
     * The constant CLIENT_TABLE_META_CHECKER_INTERVAL.
     */
    String CLIENT_TABLE_META_CHECKER_INTERVAL = CLIENT_RM_PREFIX + "tableMetaCheckerInterval";
    /**
     * The constant TCC_ACTION_INTERCEPTOR_ORDER.
     */
    String TCC_ACTION_INTERCEPTOR_ORDER = CLIENT_RM_PREFIX + "tccActionInterceptorOrder";
    /**
     * The constant CLIENT_TM_PREFIX.
     */
    String CLIENT_TM_PREFIX = CLIENT_PREFIX + "tm.";
    /**
     * The constant CLIENT_TM_COMMIT_RETRY_COUNT.
     */
    String CLIENT_TM_COMMIT_RETRY_COUNT = CLIENT_TM_PREFIX + "commitRetryCount";
    /**
     * The constant CLIENT_TM_ROLLBACK_RETRY_COUNT.
     */
    String CLIENT_TM_ROLLBACK_RETRY_COUNT = CLIENT_TM_PREFIX + "rollbackRetryCount";
    /**
     * The constant DEFAULT_GLOBAL_TRANSACTION_TIMEOUT.
     */
    String DEFAULT_GLOBAL_TRANSACTION_TIMEOUT = CLIENT_TM_PREFIX + "defaultGlobalTransactionTimeout";
    /**
     * The constant SERIALIZE_FOR_RPC.
     */
    String SERIALIZE_FOR_RPC = TRANSPORT_PREFIX + "serialization";
    /**
     * The constant COMPRESSOR_FOR_RPC.
     *
     * @since 0.7.0
     */
    String COMPRESSOR_FOR_RPC = TRANSPORT_PREFIX + "compressor";
    /**
     * The constant STORE_DB_PREFIX.
     */
    String STORE_DB_PREFIX = "store.db.";
    /**
     * The constant STORE_DB_DRUID_PREFIX.
     */
    String STORE_DB_DRUID_PREFIX = "store.db.druid.";
    /**
     * The constant STORE_DB_HIKARI_PREFIX.
     */
    String STORE_DB_HIKARI_PREFIX = "store.db.hikari.";
    /**
     * The constant STORE_DB_DBCP_PREFIX.
     */
    String STORE_DB_DBCP_PREFIX = "store.db.dbcp.";
    /**
     * The constant STORE_REDIS_PREFIX.
     */
    String STORE_REDIS_PREFIX = "store.redis.";
    /**
     * The constant STORE_DB_GLOBAL_TABLE.
     */
    String STORE_DB_GLOBAL_TABLE = STORE_DB_PREFIX + "globalTable";
    /**
     * The constant STORE_DB_BRANCH_TABLE.
     */
    String STORE_DB_BRANCH_TABLE = STORE_DB_PREFIX + "branchTable";
    /**
     * The constant DISTRIBUTED_LOCK_DB_TABLE.
     */
    String DISTRIBUTED_LOCK_DB_TABLE = STORE_DB_PREFIX + "distributedLockTable";
    /**
     * The constant STORE_DB_DATASOURCE_TYPE.
     */
    String STORE_DB_DATASOURCE_TYPE = STORE_DB_PREFIX + "datasource";
    /**
     * The constant STORE_DB_TYPE.
     */
    String STORE_DB_TYPE = STORE_DB_PREFIX + "dbType";
    /**
     * The constant STORE_DB_DRIVER_CLASS_NAME.
     */
    String STORE_DB_DRIVER_CLASS_NAME = STORE_DB_PREFIX + "driverClassName";
    /**
     * The constant STORE_DB_MAX_WAIT.
     */
    String STORE_DB_MAX_WAIT = STORE_DB_PREFIX + "maxWait";
    /**
     * The constant STORE_DB_URL.
     */
    String STORE_DB_URL = STORE_DB_PREFIX + "url";
    /**
     * The constant STORE_DB_USER.
     */
    String STORE_DB_USER = STORE_DB_PREFIX + "user";
    /**
     * The constant STORE_DB_PASSWORD.
     */
    String STORE_DB_PASSWORD = STORE_DB_PREFIX + "password";
    /**
     * The constant STORE_DB_MIN_CONN.
     */
    String STORE_DB_MIN_CONN = STORE_DB_PREFIX + "minConn";
    /**
     * The constant STORE_DB_MAX_CONN.
     */
    String STORE_DB_MAX_CONN = STORE_DB_PREFIX + "maxConn";
    /**
     * The constant STORE_DB_LOG_QUERY_LIMIT.
     */
    String STORE_DB_LOG_QUERY_LIMIT = STORE_DB_PREFIX + "queryLimit";
    /**
     * The constant STORE_DB_DRUID_TIME_BETWEEN_EVICTION_RUNS_MILLIS.
     */
    String STORE_DB_DRUID_TIME_BETWEEN_EVICTION_RUNS_MILLIS = STORE_DB_DRUID_PREFIX + "timeBetweenEvictionRunsMillis";
    /**
     * The constant STORE_DB_DRUID_MIN_EVICTABLE_TIME_MILLIS.
     */
    String STORE_DB_DRUID_MIN_EVICTABLE_TIME_MILLIS = STORE_DB_DRUID_PREFIX + "minEvictableIdleTimeMillis";
    /**
     * The constant STORE_DB_DRUID_TEST_WHILE_IDLE.
     */
    String STORE_DB_DRUID_TEST_WHILE_IDLE = STORE_DB_DRUID_PREFIX + "testWhileIdle";
    /**
     * The constant STORE_DB_DRUID_TEST_ON_BORROW.
     */
    String STORE_DB_DRUID_TEST_ON_BORROW = STORE_DB_DRUID_PREFIX + "testOnBorrow";
    /**
     * The constant STORE_DB_DRUID_KEEP_ALIVE.
     */
    String STORE_DB_DRUID_KEEP_ALIVE = STORE_DB_DRUID_PREFIX + "keepAlive";
    /**
     * The constant STORE_DB_HIKARI_IDLE_TIMEOUT.
     */
    String STORE_DB_HIKARI_IDLE_TIMEOUT = STORE_DB_HIKARI_PREFIX + "idleTimeout";
    /**
     * The constant STORE_DB_HIKARI_KEEPALIVE_TIME.
     */
    String STORE_DB_HIKARI_KEEPALIVE_TIME = STORE_DB_HIKARI_PREFIX + "keepaliveTime";
    /**
     * The constant STORE_DB_HIKARI_MAX_LIFE_TIME.
     */
    String STORE_DB_HIKARI_MAX_LIFE_TIME = STORE_DB_HIKARI_PREFIX + "maxLifetime";
    /**
     * The constant STORE_DB_HIKARI_VALIDATION_TIMEOUT.
     */
    String STORE_DB_HIKARI_VALIDATION_TIMEOUT = STORE_DB_HIKARI_PREFIX + "validationTimeout";
    /**
     * The constant STORE_DB_DBCP_TIME_BETWEEN_EVICTION_RUNS_MILLIS.
     */
    String STORE_DB_DBCP_TIME_BETWEEN_EVICTION_RUNS_MILLIS = STORE_DB_DBCP_PREFIX + "timeBetweenEvictionRunsMillis";
    /**
     * The constant STORE_DB_DBCP_MIN_EVICTABLE_TIME_MILLIS.
     */
    String STORE_DB_DBCP_MIN_EVICTABLE_TIME_MILLIS = STORE_DB_DBCP_PREFIX + "minEvictableIdleTimeMillis";
    /**
     * The constant STORE_DB_DBCP_TEST_WHILE_IDLE.
     */
    String STORE_DB_DBCP_TEST_WHILE_IDLE = STORE_DB_DBCP_PREFIX + "testWhileIdle";
    /**
     * The constant STORE_DB_DBCP_TEST_ON_BORROW.
     */
    String STORE_DB_DBCP_TEST_ON_BORROW = STORE_DB_DBCP_PREFIX + "testOnBorrow";
    /**
     * The constant LOCK_DB_TABLE.
     */
    String LOCK_DB_TABLE = STORE_DB_PREFIX + "lockTable";
    /**
     * The constant SERVER_SERVICE_PORT_CAMEL.
     */
    String SERVER_SERVICE_PORT_CAMEL = SERVER_PREFIX + "servicePort";
    /**
     * The constant SERVER_RAFT_PORT_CAMEL.
     */
    String SERVER_RAFT_PORT_CAMEL = SERVER_PREFIX + "raftPort";
    /**
     * The constant SERVER_SERVICE_PORT_CONFIG.
     */
    String SERVER_SERVICE_PORT_CONFIG = SEATA_PREFIX + SERVER_PREFIX + "service-port";
    /**
     * The constant ENV_SEATA_PORT_KEY.
     */
    String ENV_SEATA_PORT_KEY = "SEATA_PORT";
    /**
     * The constant RECOVERY_PREFIX.
     */
    String RECOVERY_PREFIX = SERVER_PREFIX + "recovery.";
    /**
     * The constant COMMITING_RETRY_PERIOD.
     */
    String COMMITING_RETRY_PERIOD = RECOVERY_PREFIX + "committingRetryPeriod";
    /**
     * The constant ASYNC_COMMITING_RETRY_PERIOD.
     */
    String ASYNC_COMMITING_RETRY_PERIOD = RECOVERY_PREFIX + "asyncCommittingRetryPeriod";
    /**
     * The constant ROLLBACKING_RETRY_PERIOD.
     */
    String ROLLBACKING_RETRY_PERIOD = RECOVERY_PREFIX + "rollbackingRetryPeriod";
    /**
     * The constant END_STATUS_RETRY_PERIOD.
     */
    String END_STATUS_RETRY_PERIOD = RECOVERY_PREFIX + "endstatusRetryPeriod";
    /**
     * The constant TIMEOUT_RETRY_PERIOD.
     */
    String TIMEOUT_RETRY_PERIOD = RECOVERY_PREFIX + "timeoutRetryPeriod";
    /**
     * The constant CLIENT_UNDO_PREFIX.
     */
    String CLIENT_UNDO_PREFIX = "client.undo.";
    /**
     * The constant TRANSACTION_UNDO_DATA_VALIDATION.
     */
    String TRANSACTION_UNDO_DATA_VALIDATION = CLIENT_UNDO_PREFIX + "dataValidation";
    /**
     * The constant TRANSACTION_UNDO_LOG_SERIALIZATION.
     */
    String TRANSACTION_UNDO_LOG_SERIALIZATION = CLIENT_UNDO_PREFIX + "logSerialization";
    /**
     * The constant TRANSACTION_UNDO_ONLY_CARE_UPDATE_COLUMNS.
     */
    String TRANSACTION_UNDO_ONLY_CARE_UPDATE_COLUMNS = CLIENT_UNDO_PREFIX + "onlyCareUpdateColumns";
    /**
     * the constant CLIENT_UNDO_COMPRESS_PREFIX
     */
    String CLIENT_UNDO_COMPRESS_PREFIX = CLIENT_UNDO_PREFIX + "compress.";
    /**
     * the constant CLIENT_UNDO_COMPRESS_TYPE
     */
    String CLIENT_UNDO_COMPRESS_TYPE = CLIENT_UNDO_COMPRESS_PREFIX + "type";
    /**
     * the constant CLIENT_UNDO_COMPRESS_ENABLE
     */
    String CLIENT_UNDO_COMPRESS_ENABLE = CLIENT_UNDO_COMPRESS_PREFIX + "enable";
    /**
     * the constant CLIENT_UNDO_COMPRESS_THRESHOLD
     */
    String CLIENT_UNDO_COMPRESS_THRESHOLD = CLIENT_UNDO_COMPRESS_PREFIX + "threshold";
    /**
     * The constant METRICS_PREFIX.
     */
    String METRICS_PREFIX = "metrics.";
    /**
     * The constant METRICS_ENABLED.
     */
    String METRICS_ENABLED = "enabled";
    /**
     * The constant METRICS_REGISTRY_TYPE.
     */
    String METRICS_REGISTRY_TYPE = "registryType";
    /**
     * The constant METRICS_EXPORTER_LIST.
     */
    String METRICS_EXPORTER_LIST = "exporterList";
    /**
     * The constant METRICS_EXPORTER_PROMETHEUS_PORT
     */
    String METRICS_EXPORTER_PROMETHEUS_PORT = "exporterPrometheusPort";
    /**
     * The constant SERVER_UNDO_PREFIX.
     */
    String SERVER_UNDO_PREFIX = SERVER_PREFIX + "undo.";
    /**
     * The constant TRANSACTION_UNDO_LOG_SAVE_DAYS.
     */
    String TRANSACTION_UNDO_LOG_SAVE_DAYS = SERVER_UNDO_PREFIX + "logSaveDays";
    /**
     * The constant TRANSACTION_UNDO_LOG_DELETE_PERIOD
     */
    String TRANSACTION_UNDO_LOG_DELETE_PERIOD = SERVER_UNDO_PREFIX + "logDeletePeriod";
    /**
     * The constant TRANSACTION_UNDO_LOG_TABLE
     */
    String TRANSACTION_UNDO_LOG_TABLE = CLIENT_UNDO_PREFIX + "logTable";
    /**
     * The constant LOG_PREFIX
     */
    String LOG_PREFIX = "log.";
    /**
     * The constant TRANSACTION_LOG_EXCEPTION_RATE
     */
    String TRANSACTION_LOG_EXCEPTION_RATE = LOG_PREFIX + "exceptionRate";
    /**
     * The constant MAX_COMMIT_RETRY_TIMEOUT.
     */
    String MAX_COMMIT_RETRY_TIMEOUT = SERVER_PREFIX + "maxCommitRetryTimeout";
    /**
     * The constant MAX_ROLLBACK_RETRY_TIMEOUT.
     */
    String MAX_ROLLBACK_RETRY_TIMEOUT = SERVER_PREFIX + "maxRollbackRetryTimeout";
    /**
     * The constant ROLLBACK_RETRY_TIMEOUT_UNLOCK_ENABLE.
     * This configuration is deprecated, please use {@link #ROLLBACK_FAILED_UNLOCK_ENABLE} instead.
     */
    @Deprecated
    String ROLLBACK_RETRY_TIMEOUT_UNLOCK_ENABLE = SERVER_PREFIX + "rollbackRetryTimeoutUnlockEnable";
    /**
     * The constant ROLLBACK_FAILED_UNLOCK_ENABLE.
     */
    String ROLLBACK_FAILED_UNLOCK_ENABLE = SERVER_PREFIX + "rollbackFailedUnlockEnable";
    /**
     * the constant RETRY_DEAD_THRESHOLD
     */
    String RETRY_DEAD_THRESHOLD = SERVER_PREFIX + "retryDeadThreshold";
    /**
     * the constant END_STATE_RETRY_DEAD_THRESHOLD
     */
    String END_STATE_RETRY_DEAD_THRESHOLD = SERVER_PREFIX + "endStateRetryDeadThreshold";
    /**
     * the constant DISTRIBUTED_LOCK_EXPIRE_TIME
     */
    String DISTRIBUTED_LOCK_EXPIRE_TIME = SERVER_PREFIX + "distributedLockExpireTime";
    /**
     * The constant MIN_SERVER_POOL_SIZE.
     */
    String MIN_SERVER_POOL_SIZE = TRANSPORT_PREFIX + "minServerPoolSize";
    /**
     * The constant MAX_SERVER_POOL_SIZE.
     */
    String MAX_SERVER_POOL_SIZE = TRANSPORT_PREFIX + "maxServerPoolSize";
    /**
     * The constant MIN_BRANCH_RESULT_POOL_SIZE.
     */
    String MIN_BRANCH_RESULT_POOL_SIZE = TRANSPORT_PREFIX + "minBranchResultPoolSize";
    /**
     * The constant MAX_BRANCH_RESULT_POOL_SIZE.
     */
    String MAX_BRANCH_RESULT_POOL_SIZE = TRANSPORT_PREFIX + "maxBranchResultPoolSize";
    /**
     * The constant MAX_TASK_QUEUE_SIZE.
     */
    String MAX_TASK_QUEUE_SIZE = TRANSPORT_PREFIX + "maxTaskQueueSize";
    /**
     * The constant KEEP_ALIVE_TIME.
     */
    String KEEP_ALIVE_TIME = TRANSPORT_PREFIX + "keepAliveTime";
    /**
     * The constant MIN_HTTP_POOL_SIZE.
     */
    String MIN_HTTP_POOL_SIZE = TRANSPORT_PREFIX + "minHttpPoolSize";
    /**
     * The constant MAX_HTTP_POOL_SIZE.
     */
    String MAX_HTTP_POOL_SIZE = TRANSPORT_PREFIX + "maxHttpPoolSize";
    /**
     * The constant MAX_HTTP_TASK_QUEUE_SIZE.
     */
    String MAX_HTTP_TASK_QUEUE_SIZE = TRANSPORT_PREFIX + "maxHttpTaskQueueSize";
    /**
     * The constant HTTP_POOL_KEEP_ALIVE_TIME.
     */
    String HTTP_POOL_KEEP_ALIVE_TIME = TRANSPORT_PREFIX + "httpPoolKeepAliveTime";
    /**
     * The constant TRANSPORT_TYPE
     */
    @Deprecated
    String TRANSPORT_TYPE = TRANSPORT_PREFIX + "type";
    /**
     * The constant TRANSPORT_SERVER
     */
    @Deprecated
    String TRANSPORT_SERVER = TRANSPORT_PREFIX + "server";
    /**
     * The constant TRANSPORT_HEARTBEAT
     */
    String TRANSPORT_HEARTBEAT = TRANSPORT_PREFIX + "heartbeat";
    /**
     * The constant THREAD_FACTORY_PREFIX
     */
    String THREAD_FACTORY_PREFIX = TRANSPORT_PREFIX + "threadFactory.";
    /**
     * The constant BOSS_THREAD_PREFIX
     */
    String BOSS_THREAD_PREFIX = THREAD_FACTORY_PREFIX + "bossThreadPrefix";
    /**
     * The constant WORKER_THREAD_PREFIX
     */
    String WORKER_THREAD_PREFIX = THREAD_FACTORY_PREFIX + "workerThreadPrefix";
    /**
     * The constant SERVER_EXECUTOR_THREAD_PREFIX
     */
    String SERVER_EXECUTOR_THREAD_PREFIX = THREAD_FACTORY_PREFIX + "serverExecutorThreadPrefix";
    /**
     * The constant SHARE_BOSS_WORKER
     */
    String SHARE_BOSS_WORKER = THREAD_FACTORY_PREFIX + "shareBossWorker";
    /**
     * The constant CLIENT_SELECTOR_THREAD_PREFIX
     */
    String CLIENT_SELECTOR_THREAD_PREFIX = THREAD_FACTORY_PREFIX + "clientSelectorThreadPrefix";
    /**
     * The constant CLIENT_SELECTOR_THREAD_SIZE
     */
    String CLIENT_SELECTOR_THREAD_SIZE = THREAD_FACTORY_PREFIX + "clientSelectorThreadSize";
    /**
     * The constant CLIENT_WORKER_THREAD_PREFIX
     */
    String CLIENT_WORKER_THREAD_PREFIX = THREAD_FACTORY_PREFIX + "clientWorkerThreadPrefix";
    /**
     * The constant BOSS_THREAD_SIZE
     */
    String BOSS_THREAD_SIZE = THREAD_FACTORY_PREFIX + "bossThreadSize";
    /**
     * The constant WORKER_THREAD_SIZE
     */
    String WORKER_THREAD_SIZE = THREAD_FACTORY_PREFIX + "workerThreadSize";
    /**
     * The constant ENABLE_CLIENT_SHARED_EVENTLOOP
     */
    String ENABLE_CLIENT_SHARED_EVENTLOOP = TRANSPORT_PREFIX + "enableClientSharedEventLoopGroup";
    /**
     * The constant SHUTDOWN_PREFIX
     */
    String SHUTDOWN_PREFIX = TRANSPORT_PREFIX + "shutdown.";
    /**
     * The constant SHUTDOWN_WAIT
     */
    String SHUTDOWN_WAIT = SHUTDOWN_PREFIX + "wait";
    /**
     * The constant ENABLE_CLIENT_BATCH_SEND_REQUEST
     */
    @Deprecated
    String ENABLE_CLIENT_BATCH_SEND_REQUEST = TRANSPORT_PREFIX + "enableClientBatchSendRequest";
    /**
     * The constant TRANSPORT_PROTOCOL
     */
    String TRANSPORT_PROTOCOL = TRANSPORT_PREFIX + "protocol";
    /**
     * The constant ENABLE_TM_CLIENT_BATCH_SEND_REQUEST
     */
    String ENABLE_TM_CLIENT_BATCH_SEND_REQUEST = TRANSPORT_PREFIX + "enableTmClientBatchSendRequest";
    /**
     * The constant ENABLE_TM_CLIENT_CHANNEL_CHECK_FAIL_FAST
     */
    String ENABLE_TM_CLIENT_CHANNEL_CHECK_FAIL_FAST = TRANSPORT_PREFIX + "enableTmClientChannelCheckFailFast";
    /**
     * The constant ENABLE_RM_CLIENT_CHANNEL_CHECK_FAIL_FAST
     */
    String ENABLE_RM_CLIENT_CHANNEL_CHECK_FAIL_FAST = TRANSPORT_PREFIX + "enableRmClientChannelCheckFailFast";
    /**
     * The constant ENABLE_RM_CLIENT_BATCH_SEND_REQUEST
     */
    String ENABLE_RM_CLIENT_BATCH_SEND_REQUEST = TRANSPORT_PREFIX + "enableRmClientBatchSendRequest";
    /**
     * The constant ENABLE_TC_SERVER_BATCH_SEND_RESPONSE
     */
    String ENABLE_TC_SERVER_BATCH_SEND_RESPONSE = TRANSPORT_PREFIX + "enableTcServerBatchSendResponse";
    /**
     * The constant DISABLE_GLOBAL_TRANSACTION.
     */
    String DISABLE_GLOBAL_TRANSACTION = SERVICE_PREFIX + "disableGlobalTransaction";
    /**
     * The constant SQL_PARSER_TYPE.
     */
    String SQL_PARSER_TYPE = CLIENT_RM_PREFIX + "sqlParserType";
    /**
     * The constant STORE_REDIS_MODE.
     */
    String STORE_REDIS_MODE = STORE_REDIS_PREFIX + "mode";
    /**
     * The constant STORE_REDIS_TYPE. lua pipeline
     */
    String STORE_REDIS_TYPE = STORE_REDIS_PREFIX + "type";
    /**
     * The constant STORE_REDIS_HOST.
     */
    String STORE_REDIS_HOST = STORE_REDIS_PREFIX + "host";
    /**
     * The constant STORE_REDIS_PORT.
     */
    String STORE_REDIS_PORT = STORE_REDIS_PREFIX + "port";
    /**
     * The constant STORE_REDIS_SINGLE_PREFIX.
     */
    String STORE_REDIS_SINGLE_PREFIX = STORE_REDIS_PREFIX + "single.";
    /**
     * The constant STORE_REDIS_SINGLE_HOST.
     */
    String STORE_REDIS_SINGLE_HOST = STORE_REDIS_SINGLE_PREFIX + "host";
    /**
     * The constant STORE_REDIS_MIN_CONN.
     */
    String STORE_REDIS_MIN_CONN = STORE_REDIS_PREFIX + "minConn";
    /**
     * The constant STORE_REDIS_SINGLE_PORT.
     */
    String STORE_REDIS_SINGLE_PORT = STORE_REDIS_SINGLE_PREFIX + "port";
    /**
     * The constant STORE_REDIS_MAX_CONN.
     */
    String STORE_REDIS_MAX_CONN = STORE_REDIS_PREFIX + "maxConn";
    /**
     * the constant STORE_REDIS_MAX_TOTAL
     */
    String STORE_REDIS_MAX_TOTAL = STORE_REDIS_PREFIX + "maxTotal";
    /**
     * The constant STORE_REDIS_DATABASE.
     */
    String STORE_REDIS_DATABASE = STORE_REDIS_PREFIX + "database";
    /**
     * The constant STORE_REDIS_PASSWORD.
     */
    String STORE_REDIS_PASSWORD = STORE_REDIS_PREFIX + "password";
    /**
     * The constant STORE_REDIS_QUERY_LIMIT.
     */
    String STORE_REDIS_QUERY_LIMIT = STORE_REDIS_PREFIX + "queryLimit";
    /**
     * The constant REDIS_SENTINEL_MODE.
     */
    String REDIS_SENTINEL_MODE = "sentinel";
    /**
     * The constant REDIS_SINGLE_MODE.
     */
    String REDIS_SINGLE_MODE = "single";
    /**
     * The constant STORE_REDIS_SENTINEL_PREFIX.
     */
    String STORE_REDIS_SENTINEL_PREFIX = STORE_REDIS_PREFIX + "sentinel.";
    /**
     * STORE_REDIS_SENTINEL_MASTERNAME.
     */
    String STORE_REDIS_SENTINEL_MASTERNAME = STORE_REDIS_SENTINEL_PREFIX + "masterName";
    /**
     * STORE_REDIS_SENTINEL_HOST.
     */
    String STORE_REDIS_SENTINEL_HOST = STORE_REDIS_SENTINEL_PREFIX + "sentinelHosts";
    /**
     * STORE_REDIS_SENTINEL_PASSWORD.
     */
    String STORE_REDIS_SENTINEL_PASSWORD = STORE_REDIS_SENTINEL_PREFIX + "sentinelPassword";
    /**
     * The constant CLIENT_DEGRADE_CHECK_PERIOD.
     */
    String CLIENT_DEGRADE_CHECK_PERIOD = CLIENT_TM_PREFIX + "degradeCheckPeriod";
    /**
     * The constant CLIENT_DEGRADE_CHECK.
     */
    String CLIENT_DEGRADE_CHECK = CLIENT_TM_PREFIX + "degradeCheck";
    /**
     * The constant CLIENT_DEGRADE_CHECK_ALLOW_TIMES.
     */
    String CLIENT_DEGRADE_CHECK_ALLOW_TIMES = CLIENT_TM_PREFIX + "degradeCheckAllowTimes";
    /**
     * The constant TM_INTERCEPTOR_ORDER.
     */
    String TM_INTERCEPTOR_ORDER = CLIENT_TM_PREFIX + "interceptorOrder";
    /**
     * The constant ACCESS_KEY.
     */
    String ACCESS_KEY = "accesskey";
    /**
     * The constant SECRET_KEY.
     */
    String SECRET_KEY = "secretkey";
    /**
     * The constant SEATA_ACCESS_KEY.
     */
    String SEATA_ACCESS_KEY = SEATA_PREFIX + ACCESS_KEY;
    /**
     * The constant SEATA_SECRET_KEY.
     */
    String SEATA_SECRET_KEY = SEATA_PREFIX + SECRET_KEY;
    /**
     * The constant EXTRA_DATA_SPLIT_CHAR.
     */
    String EXTRA_DATA_SPLIT_CHAR = "\n";
    /**
     * The constant EXTRA_DATA_KV_CHAR.
     */
    String EXTRA_DATA_KV_CHAR = "=";
    /**
     * The constant SERVER_ENABLE_CHECK_AUTH.
     */
    String SERVER_ENABLE_CHECK_AUTH = SERVER_PREFIX + "enableCheckAuth";
    /**
     * The constant NAMING_SERVER
     */
    String NAMING_SERVER = "seata";
    /**
     * The constant APPLICATION_ID.
     */
    String APPLICATION_ID = "applicationId";
    /**
     * The constant TX_SERVICE_GROUP.
     */
    String TX_SERVICE_GROUP = "txServiceGroup";
    /**
     * The constant DATA_SOURCE_PROXY_MODE.
     */
    String DATA_SOURCE_PROXY_MODE = "dataSourceProxyMode";
    /**
     * The constant TCC_PREFIX
     */
    String TCC_PREFIX = "tcc.";
    /**
     * The constant TCC_FENCE_PREFIX
     */
    String TCC_FENCE_PREFIX = TCC_PREFIX + "fence.";
    /**
     * The constant TCC_FENCE_CLEAN_PERIOD
     */
    String TCC_FENCE_CLEAN_PERIOD = TCC_FENCE_PREFIX + "cleanPeriod";
    /**
     * The constant TCC_FENCE_LOG_TABLE_NAME
     */
    String TCC_FENCE_LOG_TABLE_NAME = TCC_FENCE_PREFIX + "logTableName";
    /**
     * The constant TCC_BUSINESS_ACTION_CONTEXT_JSON_PARSER_NAME
     */
    String TCC_BUSINESS_ACTION_CONTEXT_JSON_PARSER_NAME = TCC_PREFIX + "contextJsonParserType";
    /**
     * The constant RPC_RM_REQUEST_TIMEOUT
     */
    String RPC_RM_REQUEST_TIMEOUT = TRANSPORT_PREFIX + "rpcRmRequestTimeout";
    /**
     * The constant RPC_TM_REQUEST_TIMEOUT
     */
    String RPC_TM_REQUEST_TIMEOUT = TRANSPORT_PREFIX + "rpcTmRequestTimeout";
    /**
     * The constant RPC_TC_REQUEST_TIMEOUT
     */
    String RPC_TC_REQUEST_TIMEOUT = TRANSPORT_PREFIX + "rpcTcRequestTimeout";
    /**
     * The constant SESSION_BRANCH_ASYNC_QUEUE_SIZE
     */
    String SESSION_BRANCH_ASYNC_QUEUE_SIZE = SERVER_PREFIX + SESSION_PREFIX + "branchAsyncQueueSize";
    /**
     * The constant ENABLE_BRANCH_ASYNC_REMOVE
     */
    String ENABLE_BRANCH_ASYNC_REMOVE = SERVER_PREFIX + SESSION_PREFIX + "enableBranchAsyncRemove";
    /**
     * The constant SERVER_RAFT.
     */
    String SERVER_RAFT = SERVER_PREFIX + "raft.";
    /**
     * The constant SERVER_RAFT_SSL.
     */
    String SERVER_RAFT_SSL = SERVER_RAFT + "ssl.";
    /**
     * The constant SERVER_RAFT_SSL_CLIENT.
     */
    String SERVER_RAFT_SSL_CLIENT = SERVER_RAFT_SSL + "client.";
    /**
     * The constant SERVER_RAFT_SSL_SERVER.
     */
    String SERVER_RAFT_SSL_SERVER = SERVER_RAFT_SSL + "server.";
    /**
     * The constant SERVER_RAFT_SERVER_ADDR.
     */
    String SERVER_RAFT_SERVER_ADDR = SERVER_RAFT + "serverAddr";
    /**
     * The constant SERVER_RAFT_GROUP.
     */
    String SERVER_RAFT_GROUP = SERVER_RAFT + "group";
    /**
     * The constant SERVER_RAFT_SNAPSHOT_INTERVAL.
     */
    String SERVER_RAFT_SNAPSHOT_INTERVAL = SERVER_RAFT + "snapshotInterval";
    /**
     * The constant SERVER_RAFT_DISRUPTOR_BUFFER_SIZE.
     */
    String SERVER_RAFT_DISRUPTOR_BUFFER_SIZE = SERVER_RAFT + "disruptorBufferSize";
    /**
     * The constant SERVER_RAFT_MAX_REPLICATOR_INFLIGHT_MSGS.
     */
    String SERVER_RAFT_MAX_REPLICATOR_INFLIGHT_MSGS = SERVER_RAFT + "maxReplicatorInflightMsgs";
    /**
     * The constant SERVER_RAFT_SYNC.
     */
    String SERVER_RAFT_SYNC = SERVER_RAFT + "sync";
    /**
     * The constant SERVER_RAFT_SSL_ENABLED.
     */
    String SERVER_RAFT_SSL_ENABLED = SERVER_RAFT_SSL + "enabled";
    /**
     * The constant SERVER_RAFT_SSL_SERVER_KEYSTORE_PATH.
     */
    String SERVER_RAFT_SSL_SERVER_KEYSTORE_PATH = SERVER_RAFT_SSL_SERVER + "keystore.path";
    /**
     * The constant SERVER_RAFT_SSL_CLIENT_KEYSTORE_PATH.
     */
    String SERVER_RAFT_SSL_CLIENT_KEYSTORE_PATH = SERVER_RAFT_SSL_CLIENT + "keystore.path";
    /**
     * The constant SERVER_RAFT_SSL_SERVER_KEYSTORE_PASSWORD.
     */
    String SERVER_RAFT_SSL_SERVER_KEYSTORE_PASSWORD = SERVER_RAFT_SSL_SERVER + "keystore.password";
    /**
     * The constant SERVER_RAFT_SSL_CLIENT_KEYSTORE_PASSWORD.
     */
    String SERVER_RAFT_SSL_CLIENT_KEYSTORE_PASSWORD = SERVER_RAFT_SSL_CLIENT + "keystore.password";
    /**
     * The constant SERVER_RAFT_SSL_CLIENT_KEYSTORE_TYPE.
     */
    String SERVER_RAFT_SSL_CLIENT_KEYSTORE_TYPE = SERVER_RAFT_SSL_CLIENT + "keystore.type";
    /**
     * The constant SERVER_RAFT_SSL_SERVER_KEYSTORE_TYPE.
     */
    String SERVER_RAFT_SSL_SERVER_KEYSTORE_TYPE = SERVER_RAFT_SSL_SERVER + "keystore.type";
    /**
     * The constant SERVER_RAFT_SSL_KMF_ALGORITHM.
     */
    String SERVER_RAFT_SSL_KMF_ALGORITHM = SERVER_RAFT_SSL + "kmfAlgorithm";
    /**
     * The constant SERVER_RAFT_SSL_TMF_ALGORITHM.
     */
    String SERVER_RAFT_SSL_TMF_ALGORITHM = SERVER_RAFT_SSL + "tmfAlgorithm";
    /**
     * The constant SERVER_RAFT_MAX_APPEND_BUFFER_SIZE.
     */
    String SERVER_RAFT_MAX_APPEND_BUFFER_SIZE = SERVER_RAFT + "maxAppendBufferSize";
    /**
     * The constant SERVER_RAFT_APPLY_BATCH.
     */
    String SERVER_RAFT_APPLY_BATCH = SERVER_RAFT + "applyBatch";
    /**
     * The constant SERVER_RAFT_ELECTION_TIMEOUT_MS.
     */
    String SERVER_RAFT_ELECTION_TIMEOUT_MS = SERVER_RAFT + "electionTimeoutMs";
    /**
     * The constant SERVER_RAFT_REPORTER_ENABLED.
     */
    String SERVER_RAFT_REPORTER_ENABLED = SERVER_RAFT + "reporterEnabled";
    /**
     * The constant SERVER_RAFT_REPORTER_INITIAL_DELAY.
     */
    String SERVER_RAFT_REPORTER_INITIAL_DELAY = SERVER_RAFT + "reporterInitialDelay";
    /**
     * The constant SERVER_RAFT_SERIALIZATION.
     */
    String SERVER_RAFT_SERIALIZATION = SERVER_RAFT + "serialization";
    /**
     * The constant SERVER_RAFT_COMPRESSOR.
     */
    String SERVER_RAFT_COMPRESSOR = SERVER_RAFT + "compressor";
    /**
     * The constant SERVER_HTTP.
     */
    String SERVER_HTTP = SERVER_PREFIX + "http.";
    /**
     * The constant SERVER_HTTP_FILTER_PREFIX.
     */
    String SERVER_HTTP_FILTER_PREFIX = SERVER_HTTP + "filter.";
    /**
     * The constant SERVER_HTTP_FILTER_XSS_FILTER_KEYWORDS.
     *
     */
    String SERVER_HTTP_FILTER_XSS_FILTER_KEYWORDS = SERVER_HTTP_FILTER_PREFIX + "xss.keywords";
    /**
     * The constant IS_USE_CLOUD_NAMESPACE_PARSING.
     */
    String IS_USE_CLOUD_NAMESPACE_PARSING = "isUseCloudNamespaceParsing";
    /**
     * The constant IS_USE_ENDPOINT_PARSING_RULE.
     */
    String IS_USE_ENDPOINT_PARSING_RULE = "isUseEndpointParsingRule";
    /**
     * The constant XAER_NOTA_RETRY_TIMEOUT
     */
    String XAER_NOTA_RETRY_TIMEOUT = SERVER_PREFIX + "xaerNotaRetryTimeout";
    /**
     * The constant XA_BRANCH_EXECUTION_TIMEOUT
     */
    String XA_BRANCH_EXECUTION_TIMEOUT = CLIENT_RM_PREFIX + "branchExecutionTimeoutXA";
    /**
     * The constant XA_CONNECTION_TWO_PHASE_HOLD_TIMEOUT
     */
    String XA_CONNECTION_TWO_PHASE_HOLD_TIMEOUT = CLIENT_RM_PREFIX + "connectionTwoPhaseHoldTimeoutXA";
    /**
     * The constant ENABLE_PARALLEL_REQUEST_HANDLE_KEY
     */
    String ENABLE_PARALLEL_REQUEST_HANDLE_KEY = SERVER_PREFIX + "enableParallelRequestHandle";
    /**
     * The constant ENABLE_PARALLEL_HANDLE_BRANCH_KEY
     */
    String ENABLE_PARALLEL_HANDLE_BRANCH_KEY = SERVER_PREFIX + "enableParallelHandleBranch";
    /**
     * The constant RM_APPLICATION_DATA_SIZE_LIMIT
     */
    String RM_APPLICATION_DATA_SIZE_LIMIT = CLIENT_RM_PREFIX + "applicationDataLimit";
    /**
     * The constant RM_APPLICATION_DATA_SIZE_CHECK
     */
    String RM_APPLICATION_DATA_SIZE_CHECK = CLIENT_RM_PREFIX + "applicationDataLimitCheck";
    /**
     * The constant SERVER_APPLICATION_DATA_SIZE_LIMIT
     */
    String SERVER_APPLICATION_DATA_SIZE_LIMIT = SERVER_PREFIX + "applicationDataLimit";
    /**
     * The constant SERVER_APPLICATION_DATA_SIZE_CHECK
     */
    String SERVER_APPLICATION_DATA_SIZE_CHECK = SERVER_PREFIX + "applicationDataLimitCheck";
    /**
     * The constant ROCKET_MQ_MSG_TIMEOUT
     */
    String ROCKET_MQ_MSG_TIMEOUT = SERVER_PREFIX + "rocketmqMsgTimeout";
    /**
     * The constant NAMINGSERVER_REGISTRY_PREFIX ("registry.seata.").
     */
    String NAMINGSERVER_REGISTRY_PREFIX =
            FILE_ROOT_REGISTRY + FILE_CONFIG_SPLIT_CHAR + NAMING_SERVER + FILE_CONFIG_SPLIT_CHAR;
    /**
     * The constant SEATA_NAMINGSERVER_REGISTRY_PREFIX ("seata.registry.seata.").
     */
    String SEATA_NAMINGSERVER_REGISTRY_PREFIX =
            SEATA_FILE_ROOT_CONFIG + FILE_CONFIG_SPLIT_CHAR + NAMINGSERVER_REGISTRY_PREFIX;
    /**
     * The constant REGISTRY_NAMINGSERVER_CLUSTER
     */
    String REGISTRY_NAMINGSERVER_CLUSTER = NAMINGSERVER_REGISTRY_PREFIX + "cluster";
    /**
     * The constant VGROUP_TABLE_NAME
     */
    String VGROUP_TABLE_NAME = STORE_DB_PREFIX + "vgroupTable";
    /**
     * The constant NAMESPACE_KEY
     */
    String NAMESPACE_KEY = SEATA_NAMINGSERVER_REGISTRY_PREFIX + "namespace";
    /**
     * The constant CLUSTER_NAME_KEY
     */
    String CLUSTER_NAME_KEY = SEATA_FILE_ROOT_CONFIG + FILE_CONFIG_SPLIT_CHAR + REGISTRY_NAMINGSERVER_CLUSTER;
    /**
     * The constant META_PREFIX
     */
    String META_PREFIX =
            SEATA_FILE_ROOT_CONFIG + FILE_CONFIG_SPLIT_CHAR + FILE_ROOT_REGISTRY + FILE_CONFIG_SPLIT_CHAR + "metadata.";
    /**
     * The constant SERVER_REGISTRY_METADATA_PREFIX
     */
    String SERVER_REGISTRY_METADATA_PREFIX = SERVER_PREFIX + FILE_ROOT_REGISTRY + ".metadata";
    /**
     * The constant SERVER_REGISTRY_METADATA_EXTERNAL
     */
    String SERVER_REGISTRY_METADATA_EXTERNAL = SERVER_REGISTRY_METADATA_PREFIX + ".external";
    /**
     * The constant RATE_LIMIT_PREFIX.
     */
    String RATE_LIMIT_PREFIX = SERVER_PREFIX + "ratelimit";
    /**
     * The constant RATE_LIMIT_BUCKET_TOKEN_NUM_PER_SECOND.
     */
    String RATE_LIMIT_BUCKET_TOKEN_NUM_PER_SECOND = RATE_LIMIT_PREFIX + ".bucketTokenNumPerSecond";
    /**
     * The constant RATE_LIMIT_ENABLE.
     */
    String RATE_LIMIT_ENABLE = RATE_LIMIT_PREFIX + ".enable";
    /**
     * The constant RATE_LIMIT_BUCKET_TOKEN_MAX_NUM.
     */
    String RATE_LIMIT_BUCKET_TOKEN_MAX_NUM = RATE_LIMIT_PREFIX + ".bucketTokenMaxNum";
    /**
     * The constant RATE_LIMIT_BUCKET_TOKEN_INITIAL_NUM.
     */
    String RATE_LIMIT_BUCKET_TOKEN_INITIAL_NUM = RATE_LIMIT_PREFIX + ".bucketTokenInitialNum";
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.